Oct 11 04:51:17 crc systemd[1]: Starting Kubernetes Kubelet...
Oct 11 04:51:18 crc restorecon[4640]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 11 04:51:18 crc restorecon[4640]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 11 04:51:18 crc 
restorecon[4640]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 11 04:51:18 crc 
restorecon[4640]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 
04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 04:51:18 crc 
restorecon[4640]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 
04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 
04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc 
restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 04:51:18 crc restorecon[4640]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 11 04:51:18 crc restorecon[4640]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 11 04:51:18 crc restorecon[4640]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Oct 11 04:51:19 crc kubenswrapper[4651]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 11 04:51:19 crc kubenswrapper[4651]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Oct 11 04:51:19 crc kubenswrapper[4651]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 11 04:51:19 crc kubenswrapper[4651]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Oct 11 04:51:19 crc kubenswrapper[4651]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Oct 11 04:51:19 crc kubenswrapper[4651]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.573415 4651 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579126 4651 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579149 4651 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579156 4651 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579162 4651 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579168 4651 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579173 4651 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579179 4651 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579184 4651 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579189 4651 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579195 4651 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579201 4651 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579210 4651 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579215 4651 feature_gate.go:330] unrecognized feature gate: NewOLM
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579221 4651 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579226 4651 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579231 4651 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579235 4651 feature_gate.go:330] unrecognized feature gate: PinnedImages
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579240 4651 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579245 4651 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579250 4651 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579255 4651 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579260 4651 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579265 4651 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579269 4651 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579274 4651 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579279 4651 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579284 4651 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579288 4651 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579293 4651 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579298 4651 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579303 4651 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579310 4651 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579317 4651 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579322 4651 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579327 4651 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579332 4651 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579337 4651 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579342 4651 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579346 4651 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579353 4651 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
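The long runs of "unrecognized feature gate" warnings are expected on OpenShift: the cluster passes its own gate names (MachineConfigNodes, GatewayAPI, and the rest) to the upstream Kubernetes feature-gate parser, which knows only the upstream gates, warns on unknown names, and continues rather than failing startup. A minimal sketch of that tolerant behavior, with an illustrative known-gate table (not the real component code):

```go
package main

import "log"

// known mirrors the idea of the upstream feature-gate registry: only
// gates compiled into this component are recognized. The entries here
// are an illustrative subset, not the full Kubernetes table.
var known = map[string]bool{
	"CloudDualStackNodeIPs":                  true,
	"DisableKubeletCloudCredentialProviders": true,
	"KMSv1":                                  true,
}

// set applies requested gate values, warning (but not failing) on names
// the registry does not know, mirroring feature_gate.go:330 in the log.
func set(requested map[string]bool) map[string]bool {
	effective := map[string]bool{}
	for name, value := range requested {
		if _, ok := known[name]; !ok {
			log.Printf("unrecognized feature gate: %s", name)
			continue
		}
		effective[name] = value
	}
	return effective
}

func main() {
	// "MachineConfigNodes" is an OpenShift gate unknown to this registry,
	// so it is skipped with a warning, like the kubelet log shows.
	gates := set(map[string]bool{
		"MachineConfigNodes":    true,
		"CloudDualStackNodeIPs": true,
	})
	log.Printf("feature gates: %v", gates)
}
```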
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579359 4651 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579365 4651 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579370 4651 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579375 4651 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579380 4651 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579384 4651 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579389 4651 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579395 4651 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579399 4651 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579404 4651 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579408 4651 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579414 4651 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579418 4651 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579423 4651 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579427 4651 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579432 4651 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579437 4651 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579441 4651 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579448 4651 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579454 4651 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579459 4651 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579464 4651 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579470 4651 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579475 4651 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579479 4651 feature_gate.go:330] unrecognized feature gate: SignatureStores
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579485 4651 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579490 4651 feature_gate.go:330] unrecognized feature gate: Example
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579496 4651 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579502 4651 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579507 4651 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.579512 4651 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579611 4651 flags.go:64] FLAG: --address="0.0.0.0"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579623 4651 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579637 4651 flags.go:64] FLAG: --anonymous-auth="true"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579649 4651 flags.go:64] FLAG: --application-metrics-count-limit="100"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579657 4651 flags.go:64] FLAG: --authentication-token-webhook="false"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579662 4651 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579670 4651 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579677 4651 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579682 4651 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579688 4651 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579694 4651 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579701 4651 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579708 4651 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579714 4651 flags.go:64] FLAG: --cgroup-root=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579719 4651 flags.go:64] FLAG: --cgroups-per-qos="true"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579725 4651 flags.go:64] FLAG: --client-ca-file=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579730 4651 flags.go:64] FLAG: --cloud-config=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579736 4651 flags.go:64] FLAG: --cloud-provider=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579742 4651 flags.go:64] FLAG: --cluster-dns="[]"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579749 4651 flags.go:64] FLAG: --cluster-domain=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579755 4651 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579761 4651 flags.go:64] FLAG: --config-dir=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579767 4651 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579773 4651 flags.go:64] FLAG: --container-log-max-files="5"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579781 4651 flags.go:64] FLAG: --container-log-max-size="10Mi"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579787 4651 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579794 4651 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579801 4651 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579807 4651 flags.go:64] FLAG: --contention-profiling="false"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579841 4651 flags.go:64] FLAG: --cpu-cfs-quota="true"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579847 4651 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579853 4651 flags.go:64] FLAG: --cpu-manager-policy="none"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579859 4651 flags.go:64] FLAG: --cpu-manager-policy-options=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579866 4651 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579872 4651 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579878 4651 flags.go:64] FLAG: --enable-debugging-handlers="true"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579883 4651 flags.go:64] FLAG: --enable-load-reader="false"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579889 4651 flags.go:64] FLAG: --enable-server="true"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579894 4651 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579902 4651 flags.go:64] FLAG: --event-burst="100"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579907 4651 flags.go:64] FLAG: --event-qps="50"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579913 4651 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579918 4651 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579924 4651 flags.go:64] FLAG: --eviction-hard=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579931 4651 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579936 4651 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579941 4651 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579947 4651 flags.go:64] FLAG: --eviction-soft=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579954 4651 flags.go:64] FLAG: --eviction-soft-grace-period=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579959 4651 flags.go:64] FLAG: --exit-on-lock-contention="false"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579964 4651 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579970 4651 flags.go:64] FLAG: --experimental-mounter-path=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579975 4651 flags.go:64] FLAG: --fail-cgroupv1="false"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579981 4651 flags.go:64] FLAG: --fail-swap-on="true"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579986 4651 flags.go:64] FLAG: --feature-gates=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579993 4651 flags.go:64] FLAG: --file-check-frequency="20s"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.579999 4651 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580005 4651 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580011 4651 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580016 4651 flags.go:64] FLAG: --healthz-port="10248"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580022 4651 flags.go:64] FLAG: --help="false"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580027 4651 flags.go:64] FLAG: --hostname-override=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580032 4651 flags.go:64] FLAG: --housekeeping-interval="10s"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580038 4651 flags.go:64] FLAG: --http-check-frequency="20s"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580044 4651 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580049 4651 flags.go:64] FLAG: --image-credential-provider-config=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580054 4651 flags.go:64] FLAG: --image-gc-high-threshold="85"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580061 4651 flags.go:64] FLAG: --image-gc-low-threshold="80"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580067 4651 flags.go:64] FLAG: --image-service-endpoint=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580072 4651 flags.go:64] FLAG: --kernel-memcg-notification="false"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580078 4651 flags.go:64] FLAG: --kube-api-burst="100"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580084 4651 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580089 4651 flags.go:64] FLAG: --kube-api-qps="50"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580095 4651 flags.go:64] FLAG: --kube-reserved=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580100 4651 flags.go:64] FLAG: --kube-reserved-cgroup=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580106 4651 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580112 4651 flags.go:64] FLAG: --kubelet-cgroups=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580117 4651 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580123 4651 flags.go:64] FLAG: --lock-file=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580129 4651 flags.go:64] FLAG: --log-cadvisor-usage="false"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580134 4651 flags.go:64] FLAG: --log-flush-frequency="5s"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580140 4651 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580151 4651 flags.go:64] FLAG: --log-json-split-stream="false"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580157 4651 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580163 4651 flags.go:64] FLAG: --log-text-split-stream="false"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580169 4651 flags.go:64] FLAG: --logging-format="text"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580174 4651 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580181 4651 flags.go:64] FLAG: --make-iptables-util-chains="true"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580187 4651 flags.go:64] FLAG: --manifest-url=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580192 4651 flags.go:64] FLAG: --manifest-url-header=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580199 4651 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580205 4651 flags.go:64] FLAG: --max-open-files="1000000"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580212 4651 flags.go:64] FLAG: --max-pods="110"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580217 4651 flags.go:64] FLAG: --maximum-dead-containers="-1"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580223 4651 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580229 4651 flags.go:64] FLAG: --memory-manager-policy="None"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580234 4651 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580240 4651 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580245 4651 flags.go:64] FLAG: --node-ip="192.168.126.11"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580251 4651 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580263 4651 flags.go:64] FLAG: --node-status-max-images="50"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580269 4651 flags.go:64] FLAG: --node-status-update-frequency="10s"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580274 4651 flags.go:64] FLAG: --oom-score-adj="-999"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580280 4651 flags.go:64] FLAG: --pod-cidr=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580285 4651 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580293 4651 flags.go:64] FLAG: --pod-manifest-path=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580298 4651 flags.go:64] FLAG: --pod-max-pids="-1"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580304 4651 flags.go:64] FLAG: --pods-per-core="0"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580309 4651 flags.go:64] FLAG: --port="10250"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580315 4651 flags.go:64] FLAG: --protect-kernel-defaults="false"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580321 4651 flags.go:64] FLAG: --provider-id=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580326 4651 flags.go:64] FLAG: --qos-reserved=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580332 4651 flags.go:64] FLAG: --read-only-port="10255"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580337 4651 flags.go:64] FLAG: --register-node="true"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580343 4651 flags.go:64] FLAG: --register-schedulable="true"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580348 4651 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580357 4651 flags.go:64] FLAG: --registry-burst="10"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580362 4651 flags.go:64] FLAG: --registry-qps="5"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580368 4651 flags.go:64] FLAG: --reserved-cpus=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580373 4651 flags.go:64] FLAG: --reserved-memory=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580380 4651 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580386 4651 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580391 4651 flags.go:64] FLAG: --rotate-certificates="false"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580396 4651 flags.go:64] FLAG: --rotate-server-certificates="false"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580402 4651 flags.go:64] FLAG: --runonce="false"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580407 4651 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580413 4651 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580419 4651 flags.go:64] FLAG: --seccomp-default="false"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580424 4651 flags.go:64] FLAG: --serialize-image-pulls="true"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580430 4651 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580435 4651 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580441 4651 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580447 4651 flags.go:64] FLAG: --storage-driver-password="root"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580452 4651 flags.go:64] FLAG: --storage-driver-secure="false"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580458 4651 flags.go:64] FLAG: --storage-driver-table="stats"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580463 4651 flags.go:64] FLAG: --storage-driver-user="root"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580469 4651 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580475 4651 flags.go:64] FLAG: --sync-frequency="1m0s"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580481 4651 flags.go:64] FLAG: --system-cgroups=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580487 4651 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580496 4651 flags.go:64] FLAG: --system-reserved-cgroup=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580502 4651 flags.go:64] FLAG: --tls-cert-file=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580507 4651 flags.go:64] FLAG: --tls-cipher-suites="[]"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580513 4651 flags.go:64] FLAG: --tls-min-version=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580519 4651 flags.go:64] FLAG: --tls-private-key-file=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580524 4651 flags.go:64] FLAG: --topology-manager-policy="none"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580530 4651 flags.go:64] FLAG: --topology-manager-policy-options=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580536 4651 flags.go:64] FLAG: --topology-manager-scope="container"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580541 4651 flags.go:64] FLAG: --v="2"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580548 4651 flags.go:64] FLAG: --version="false"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580555 4651 flags.go:64] FLAG: --vmodule=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580562 4651 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.580568 4651 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580698 4651 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580705 4651 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580710 4651 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580716 4651 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580721 4651 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580726 4651 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580732 4651 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580736 4651 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580741 4651 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580746 4651 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580751 4651 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580756 4651 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580761 4651 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580766 4651 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580772 4651 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580778 4651 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580784 4651 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580789 4651 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580794 4651 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580800 4651 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580805 4651 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580810 4651 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580839 4651 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580845 4651 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580852 4651 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580858 4651 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580865 4651 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
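The wall of flags.go:64] FLAG: lines above is the kubelet dumping every registered command-line flag with its effective value (it does this at log verbosity 2), which amounts to a walk over the parsed flag set. A minimal sketch of such a dump with spf13/pflag, using a tiny illustrative subset of flags rather than kubelet's real set:

```go
package main

import (
	"log"

	"github.com/spf13/pflag"
)

func main() {
	fs := pflag.NewFlagSet("kubelet-sketch", pflag.ContinueOnError)
	fs.String("node-ip", "192.168.126.11", "node IP")
	fs.Int("max-pods", 110, "maximum pods per node")
	_ = fs.Parse([]string{"--max-pods=110"})

	// VisitAll walks every registered flag in lexical order, whether or
	// not it was set, matching the exhaustive FLAG: --name="value" dump
	// in the log (defaults appear alongside explicitly-set values).
	fs.VisitAll(func(f *pflag.Flag) {
		log.Printf("FLAG: --%s=%q", f.Name, f.Value.String())
	})
}
```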
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580870 4651 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580876 4651 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580881 4651 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580886 4651 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580891 4651 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580896 4651 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580900 4651 feature_gate.go:330] unrecognized feature gate: SignatureStores
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580905 4651 feature_gate.go:330] unrecognized feature gate: Example
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580910 4651 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580915 4651 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580920 4651 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580924 4651 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580929 4651 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580934 4651 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580939 4651 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580944 4651 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580950 4651 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580955 4651 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580960 4651 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580964 4651 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580969 4651 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580974 4651 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580979 4651 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580984 4651 feature_gate.go:330] unrecognized feature gate: PinnedImages
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580988 4651 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580993 4651 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.580999 4651 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.581004 4651 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.581008 4651 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.581013 4651 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.581018 4651 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.581022 4651 feature_gate.go:330] unrecognized feature gate: NewOLM
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.581027 4651 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.581032 4651 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.581037 4651 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.581041 4651 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.581046 4651 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.581051 4651 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.581056 4651 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.581060 4651 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.581065 4651 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.581070 4651 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.581076 4651 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
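Mixed into the warning runs are feature_gate.go:351/:353 notices: explicitly setting a gate that has already gone GA (or is deprecated) still works, but the parser flags it because the gate, and with it the override, is scheduled for removal. A minimal sketch of that lifecycle check, with an illustrative lifecycle table (not the real upstream data structures):

```go
package main

import "log"

// stage is a simplified lifecycle marker for a known gate.
type stage int

const (
	alpha stage = iota
	beta
	ga
	deprecated
)

// lifecycle is an illustrative subset; real components compile in the
// stage of every gate they know about.
var lifecycle = map[string]stage{
	"CloudDualStackNodeIPs":                  ga,
	"DisableKubeletCloudCredentialProviders": ga,
	"ValidatingAdmissionPolicy":              ga,
	"KMSv1":                                  deprecated,
}

// warnOnSet mirrors the messages at feature_gate.go:351 and :353:
// the override is accepted, but the caller is told it is temporary.
func warnOnSet(name string, value bool) {
	switch lifecycle[name] {
	case ga:
		log.Printf("Setting GA feature gate %s=%t. It will be removed in a future release.", name, value)
	case deprecated:
		log.Printf("Setting deprecated feature gate %s=%t. It will be removed in a future release.", name, value)
	}
}

func main() {
	warnOnSet("KMSv1", true)
	warnOnSet("ValidatingAdmissionPolicy", true)
}
```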
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.581082 4651 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.581098 4651 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.594679 4651 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.594743 4651 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.594907 4651 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.594921 4651 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.594929 4651 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.594938 4651 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.594949 4651 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.594958 4651 feature_gate.go:330] unrecognized feature gate: SignatureStores
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.594966 4651 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.594974 4651 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.594981 4651 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.594990 4651 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595000 4651 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595010 4651 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595020 4651 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595031 4651 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595042 4651 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595052 4651 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595062 4651 feature_gate.go:330] unrecognized feature gate: PinnedImages
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595071 4651 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595082 4651 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595097 4651 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595112 4651 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595123 4651 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595132 4651 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595141 4651 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595151 4651 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595163 4651 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595173 4651 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595183 4651 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595192 4651 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595202 4651 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595213 4651 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595226 4651 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595241 4651 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595256 4651 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595269 4651 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595279 4651 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595290 4651 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595301 4651 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595312 4651 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595320 4651 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595329 4651 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595336 4651 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595346 4651 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595354 4651 feature_gate.go:330] unrecognized feature gate: Example
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595362 4651 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595371 4651 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595379 4651 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595386 4651 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595395 4651 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595405 4651 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595414 4651 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595425 4651 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595435 4651 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595445 4651 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595455 4651 feature_gate.go:330] unrecognized feature gate: NewOLM
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595464 4651 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595474 4651 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595482 4651 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595493 4651 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595503 4651 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595512 4651 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595521 4651 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595529 4651 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595537 4651 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595545 4651 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595553 4651 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595562 4651 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595570 4651 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595578 4651 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595586 4651 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595594 4651 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.595608 4651 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595907 4651 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
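Each parse pass ends with the same feature_gate.go:386] feature gates: {map[...]} summary; only explicitly-set gates appear in the printed map, and every other gate falls back to its compiled-in default when queried. The run of warnings continuing below is simply the next pass over the same gate list. A minimal sketch of that lookup order, with an illustrative defaults table (gate names taken from the logged map):

```go
package main

import "fmt"

// Illustrative defaults; real components compile in a much larger table.
var defaults = map[string]bool{
	"KMSv1":         false,
	"NodeSwap":      false,
	"ProcMountType": false,
}

// Explicit overrides, mirroring the map printed at feature_gate.go:386.
var overrides = map[string]bool{
	"KMSv1":                 true,
	"CloudDualStackNodeIPs": true,
}

// enabled consults explicit overrides first, then the compiled-in default.
func enabled(gate string) bool {
	if v, ok := overrides[gate]; ok {
		return v
	}
	return defaults[gate]
}

func main() {
	fmt.Println("KMSv1 enabled:", enabled("KMSv1"))       // true (override)
	fmt.Println("NodeSwap enabled:", enabled("NodeSwap")) // false (default)
}
```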
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595924 4651 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595933 4651 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595942 4651 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595951 4651 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595961 4651 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595969 4651 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595978 4651 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595986 4651 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.595997 4651 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596006 4651 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596016 4651 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596028 4651 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596038 4651 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596051 4651 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596062 4651 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596072 4651 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596082 4651 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596093 4651 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596103 4651 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596114 4651 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596123 4651 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596130 4651 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596138 4651 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596146 4651 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596155 4651 feature_gate.go:330] unrecognized feature gate: 
BootcNodeManagement Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596162 4651 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596170 4651 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596178 4651 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596185 4651 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596193 4651 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596201 4651 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596208 4651 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596216 4651 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596224 4651 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596232 4651 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596240 4651 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596248 4651 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596256 4651 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596265 4651 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596273 4651 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596281 4651 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596289 4651 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596297 4651 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596304 4651 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596312 4651 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596320 4651 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596328 4651 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596335 4651 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596343 4651 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596350 4651 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596358 4651 
feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596366 4651 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596373 4651 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596381 4651 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596388 4651 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596396 4651 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596404 4651 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596414 4651 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596424 4651 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596432 4651 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596442 4651 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596450 4651 feature_gate.go:330] unrecognized feature gate: Example Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596458 4651 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596466 4651 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596474 4651 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596482 4651 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596490 4651 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596497 4651 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596505 4651 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.596513 4651 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.596525 4651 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.598015 4651 server.go:940] "Client rotation is on, will bootstrap in background" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.604486 4651 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Oct 11 04:51:19 crc 
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.604662 4651 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.607272 4651 server.go:997] "Starting client certificate rotation"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.607324 4651 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.607593 4651 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-18 08:57:22.010949659 +0000 UTC
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.607721 4651 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 1636h6m2.403234692s for next certificate rotation
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.640849 4651 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.646051 4651 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.665660 4651 log.go:25] "Validated CRI v1 runtime API"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.712219 4651 log.go:25] "Validated CRI v1 image API"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.714498 4651 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.723415 4651 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-10-11-03-56-17-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.723465 4651 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.756578 4651 manager.go:217] Machine: {Timestamp:2025-10-11 04:51:19.751464334 +0000 UTC m=+0.647697200 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:f1c4ea71-0c28-43a7-99a4-e27ff72e186a BootID:a821a5c3-63e0-43db-82e0-e9c6e98ead52 Filesystems:[{Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:dd:ba:3d Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:dd:ba:3d Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:8a:9e:f5 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:f8:6f:31 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:22:1e:59 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:fc:5b:82 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:5e:ec:b1:76:96:24 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:f6:ab:cc:e8:1f:b6 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.757095 4651 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.757353 4651 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.759608 4651 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.760008 4651 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.760072 4651 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.760393 4651 topology_manager.go:138] "Creating topology manager with none policy"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.760412 4651 container_manager_linux.go:303] "Creating device plugin manager"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.761134 4651 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.761233 4651 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.762247 4651 state_mem.go:36] "Initialized new in-memory state store"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.762395 4651 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.766713 4651 kubelet.go:418] "Attempting to sync node with API server"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.766755 4651 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.766802 4651 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.766852 4651 kubelet.go:324] "Adding apiserver pod source"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.766874 4651 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.774279 4651 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.774857 4651 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.774866 4651 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused
Oct 11 04:51:19 crc kubenswrapper[4651]: E1011 04:51:19.775019 4651 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.182:6443: connect: connection refused" logger="UnhandledError"
Oct 11 04:51:19 crc kubenswrapper[4651]: E1011 04:51:19.775027 4651 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.182:6443: connect: connection refused" logger="UnhandledError"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.775776 4651 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.777571 4651 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.779204 4651 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.779250 4651 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.779265 4651 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.779279 4651 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.779301 4651 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.779314 4651 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.779327 4651 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.779348 4651 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.779366 4651 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.779380 4651 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.779398 4651 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.779412 4651 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.780576 4651 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.781280 4651 server.go:1280] "Started kubelet"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.782304 4651 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.782694 4651 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Oct 11 04:51:19 crc systemd[1]: Started Kubernetes Kubelet.
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.783375 4651 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.784094 4651 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.785690 4651 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.785758 4651 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Oct 11 04:51:19 crc kubenswrapper[4651]: E1011 04:51:19.786298 4651 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.786411 4651 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 13:39:40.016702843 +0000 UTC
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.786455 4651 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 1640h48m20.230250899s for next certificate rotation
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.786495 4651 volume_manager.go:287] "The desired_state_of_world populator starts"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.786507 4651 volume_manager.go:289] "Starting Kubelet Volume Manager"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.786673 4651 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.786876 4651 server.go:460] "Adding debug handlers to kubelet server"
Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.789275 4651 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused
Oct 11 04:51:19 crc kubenswrapper[4651]: E1011 04:51:19.789368 4651 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.182:6443: connect: connection refused" logger="UnhandledError"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.789568 4651 factory.go:55] Registering systemd factory
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.789613 4651 factory.go:221] Registration of the systemd container factory successfully
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.790313 4651 factory.go:153] Registering CRI-O factory
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.790351 4651 factory.go:221] Registration of the crio container factory successfully
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.790447 4651 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.790480 4651 factory.go:103] Registering Raw factory
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.790517 4651 manager.go:1196] Started watching for new ooms in manager
Oct 11 04:51:19 crc kubenswrapper[4651]: E1011 04:51:19.790862 4651 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" interval="200ms"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.791490 4651 manager.go:319] Starting recovery of all containers
Oct 11 04:51:19 crc kubenswrapper[4651]: E1011 04:51:19.793988 4651 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.182:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186d56a136d2a560 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-11 04:51:19.78122992 +0000 UTC m=+0.677462746,LastTimestamp:2025-10-11 04:51:19.78122992 +0000 UTC m=+0.677462746,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.808855 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.808928 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.808950 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.808972 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809043 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809061 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809080 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809099 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809120 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809138 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809155 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809173 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809192 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809213 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809232 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809249 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext=""
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809303 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext=""
volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809342 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809360 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809379 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809396 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809413 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809464 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809483 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809501 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809525 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809545 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809563 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809583 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809600 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809619 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809640 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809658 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809676 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809693 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809712 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809730 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809787 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809807 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" 
volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809856 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809922 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.809947 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.810012 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.810038 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.810060 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.810082 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.810106 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.810133 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.810152 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.810169 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" 
volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.810188 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.810213 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.810234 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.810256 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.810276 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.810296 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.810315 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.810332 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.810350 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.810367 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.810384 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" 
volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.810401 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.810421 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.810441 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.810458 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.810476 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.810495 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.810512 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.810531 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.810548 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.810580 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.810662 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" 
volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.810907 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.810950 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.810981 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.811010 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.811038 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.811105 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.811134 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.811257 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.811321 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.811372 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.811440 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" 
volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.811464 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.811501 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.811521 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.811541 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.811560 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.811599 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.811647 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.811669 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.811716 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.811763 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.811807 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.811884 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.811910 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.811954 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.816287 4651 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.816336 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.816361 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.816383 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.816402 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.816461 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.816485 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.816553 4651 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.817161 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.817207 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.817231 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.817254 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.817331 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.817362 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.817389 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.817417 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.817442 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.817498 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.819366 4651 reconstruct.go:130] "Volume 
is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.819464 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.819488 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.819507 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.819526 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.819547 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.819570 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.819628 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.819647 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.819666 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.819686 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.819706 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the 
actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.819725 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.819747 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.819765 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.819783 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.819801 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.819843 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.819866 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.819887 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.819907 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.819926 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.819944 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.819962 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.819980 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.819999 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820017 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820037 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820058 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820077 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820096 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820115 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820134 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820153 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820171 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820189 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820206 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820223 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820243 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820261 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820280 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820300 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820318 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820335 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820352 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820371 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820390 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820408 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820426 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820445 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820462 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820479 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820498 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820515 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820532 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820551 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" 
volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820569 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820587 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820606 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820624 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820643 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820661 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820679 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820697 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820769 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820788 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820807 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" 
volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820849 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820874 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820898 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820919 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820937 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820956 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820973 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.820995 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.821018 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.821038 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.821067 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" 
volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.821087 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.821105 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.821128 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.821156 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.821184 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.821211 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.821231 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.821250 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.821267 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.821286 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.821305 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" 
volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.821322 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.821341 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.821359 4651 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.821377 4651 reconstruct.go:97] "Volume reconstruction finished" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.821390 4651 reconciler.go:26] "Reconciler: start to sync state" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.832314 4651 manager.go:324] Recovery completed Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.840770 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.841941 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.841971 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.842002 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.848074 4651 cpu_manager.go:225] "Starting CPU manager" policy="none" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.848096 4651 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.848115 4651 state_mem.go:36] "Initialized new in-memory state store" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.866094 4651 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.868111 4651 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.868174 4651 status_manager.go:217] "Starting to sync pod status with apiserver" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.868221 4651 kubelet.go:2335] "Starting kubelet main sync loop" Oct 11 04:51:19 crc kubenswrapper[4651]: E1011 04:51:19.868419 4651 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Oct 11 04:51:19 crc kubenswrapper[4651]: W1011 04:51:19.871170 4651 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused Oct 11 04:51:19 crc kubenswrapper[4651]: E1011 04:51:19.871274 4651 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.182:6443: connect: connection refused" logger="UnhandledError" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.873890 4651 policy_none.go:49] "None policy: Start" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.874977 4651 memory_manager.go:170] "Starting memorymanager" policy="None" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.875034 4651 state_mem.go:35] "Initializing new in-memory state store" Oct 11 04:51:19 crc kubenswrapper[4651]: E1011 04:51:19.888613 4651 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.945478 4651 manager.go:334] "Starting Device Plugin manager" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.945573 4651 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.945594 4651 server.go:79] "Starting device plugin registration server" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.946193 4651 eviction_manager.go:189] "Eviction manager: starting control loop" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.946247 4651 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.946474 4651 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.946657 4651 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.946678 4651 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Oct 11 04:51:19 crc kubenswrapper[4651]: E1011 04:51:19.961407 4651 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.968573 4651 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc"] Oct 11 04:51:19 crc kubenswrapper[4651]: 
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.969899 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.969939 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.969981 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.970238 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.970525 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.970582 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.971294 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.971313 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.971321 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.971417 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.971601 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.971719 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.972321 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.972392 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.972411 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.972615 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.972752 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.972881 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.972784 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.972944 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.972960 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.972978 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.973034 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.973051 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.973782 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.973858 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.973875 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.974477 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.974517 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.974569 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.974799 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.974868 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.975307 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.975566 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.975593 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.975604 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.975983 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.976013 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.976022 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.976240 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.976267 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.976993 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.977014 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:19 crc kubenswrapper[4651]: I1011 04:51:19.977021 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:19 crc kubenswrapper[4651]: E1011 04:51:19.992496 4651 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" interval="400ms"
Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.023857 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.023889 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.023908 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.023932 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.023952 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.023969 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.023983 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.023999 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.024014 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.024028 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.024046 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.024063 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.024079 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.024126 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.024241 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.047005 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.048226 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.048266 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.048278 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.048303 4651 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 11 04:51:20 crc kubenswrapper[4651]: E1011 04:51:20.048801 4651 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.182:6443: connect: connection refused" node="crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.126059 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.126340 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.126355 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 11 04:51:20 crc 
kubenswrapper[4651]: I1011 04:51:20.126372 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.126387 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.126403 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.126422 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.126437 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.126439 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.126478 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.126453 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.126524 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.126506 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 
11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.126532 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.126544 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.126580 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.126623 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.126634 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.126697 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.126708 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.126738 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.126777 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.126791 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.126843 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.126844 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.126901 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.126912 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.126937 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.127005 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.127155 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.249043 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.251074 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.251156 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.251181 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.251226 4651 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 11 04:51:20 crc kubenswrapper[4651]: E1011 04:51:20.252000 4651 kubelet_node_status.go:99] "Unable to register node with API 
server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.182:6443: connect: connection refused" node="crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.318046 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.332898 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.356144 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.364811 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.369077 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 11 04:51:20 crc kubenswrapper[4651]: W1011 04:51:20.377366 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-d35a0733b8e05189160c280a8898857a6d0c72774b822f71acccd2842ff5f1b2 WatchSource:0}: Error finding container d35a0733b8e05189160c280a8898857a6d0c72774b822f71acccd2842ff5f1b2: Status 404 returned error can't find the container with id d35a0733b8e05189160c280a8898857a6d0c72774b822f71acccd2842ff5f1b2 Oct 11 04:51:20 crc kubenswrapper[4651]: W1011 04:51:20.379218 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-9e6a2afa82b74410310b23058da6815c55aae47ac1c7a3793602124e0bbb2f2a WatchSource:0}: Error finding container 9e6a2afa82b74410310b23058da6815c55aae47ac1c7a3793602124e0bbb2f2a: Status 404 returned error can't find the container with id 9e6a2afa82b74410310b23058da6815c55aae47ac1c7a3793602124e0bbb2f2a Oct 11 04:51:20 crc kubenswrapper[4651]: W1011 04:51:20.389488 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-5d5a1df759b0a1d61122e6b61f7e381e75b81cc1364be425a36b91e8dd64e43e WatchSource:0}: Error finding container 5d5a1df759b0a1d61122e6b61f7e381e75b81cc1364be425a36b91e8dd64e43e: Status 404 returned error can't find the container with id 5d5a1df759b0a1d61122e6b61f7e381e75b81cc1364be425a36b91e8dd64e43e Oct 11 04:51:20 crc kubenswrapper[4651]: W1011 04:51:20.392463 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-bb8dcb9f4d6719a4ce078cac3035a2b37b8fe368db015135865004e36108a9db WatchSource:0}: Error finding container bb8dcb9f4d6719a4ce078cac3035a2b37b8fe368db015135865004e36108a9db: Status 404 returned error can't find the container with id bb8dcb9f4d6719a4ce078cac3035a2b37b8fe368db015135865004e36108a9db Oct 11 04:51:20 crc kubenswrapper[4651]: E1011 04:51:20.393283 4651 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection 
refused" interval="800ms" Oct 11 04:51:20 crc kubenswrapper[4651]: W1011 04:51:20.396370 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-e1fa0f5cd5eb9bb536505016e60bfee6aa8c57f7ecf687764ee6ca4349b2058e WatchSource:0}: Error finding container e1fa0f5cd5eb9bb536505016e60bfee6aa8c57f7ecf687764ee6ca4349b2058e: Status 404 returned error can't find the container with id e1fa0f5cd5eb9bb536505016e60bfee6aa8c57f7ecf687764ee6ca4349b2058e Oct 11 04:51:20 crc kubenswrapper[4651]: W1011 04:51:20.597343 4651 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused Oct 11 04:51:20 crc kubenswrapper[4651]: E1011 04:51:20.597426 4651 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.182:6443: connect: connection refused" logger="UnhandledError" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.652631 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.653783 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.653831 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.653841 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.653860 4651 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 11 04:51:20 crc kubenswrapper[4651]: E1011 04:51:20.654168 4651 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.182:6443: connect: connection refused" node="crc" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.783615 4651 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused Oct 11 04:51:20 crc kubenswrapper[4651]: W1011 04:51:20.869418 4651 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused Oct 11 04:51:20 crc kubenswrapper[4651]: E1011 04:51:20.869519 4651 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.182:6443: connect: connection refused" logger="UnhandledError" Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.873423 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"e1fa0f5cd5eb9bb536505016e60bfee6aa8c57f7ecf687764ee6ca4349b2058e"} Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.875376 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"bb8dcb9f4d6719a4ce078cac3035a2b37b8fe368db015135865004e36108a9db"} Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.876524 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"5d5a1df759b0a1d61122e6b61f7e381e75b81cc1364be425a36b91e8dd64e43e"} Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.877433 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"9e6a2afa82b74410310b23058da6815c55aae47ac1c7a3793602124e0bbb2f2a"} Oct 11 04:51:20 crc kubenswrapper[4651]: I1011 04:51:20.878445 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d35a0733b8e05189160c280a8898857a6d0c72774b822f71acccd2842ff5f1b2"} Oct 11 04:51:21 crc kubenswrapper[4651]: E1011 04:51:21.194262 4651 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" interval="1.6s" Oct 11 04:51:21 crc kubenswrapper[4651]: W1011 04:51:21.249970 4651 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused Oct 11 04:51:21 crc kubenswrapper[4651]: E1011 04:51:21.250050 4651 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.182:6443: connect: connection refused" logger="UnhandledError" Oct 11 04:51:21 crc kubenswrapper[4651]: W1011 04:51:21.329494 4651 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused Oct 11 04:51:21 crc kubenswrapper[4651]: E1011 04:51:21.329585 4651 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.182:6443: connect: connection refused" logger="UnhandledError" Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.455030 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.456268 4651 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.456297 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.456306 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.456325 4651 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 11 04:51:21 crc kubenswrapper[4651]: E1011 04:51:21.456931 4651 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.182:6443: connect: connection refused" node="crc" Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.783349 4651 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.884767 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.884749 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c6263dc47a8284e9849da1fb15c4788174252f3637665ac6e4b19298ca90c587"} Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.885030 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"76ee2612930be6596c0186cc000d80039b52e225fd64102002ed9316a0605521"} Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.885049 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"aabba06dd47ad528f830a47cc79ddabb3789d24ebed62a6e8f976df1b156ef1b"} Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.885059 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"eb996f17546fc77ac1b8ed27428aaa6060822daa0156cf016dbb24ed278403a8"} Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.885618 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.885641 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.885649 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.886585 4651 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3" exitCode=0 Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.886681 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3"} Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.886787 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.889737 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.889774 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.889788 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.890807 4651 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="eacbdff310c730327d8f04e45b6234aaef1ab91e11b24e8dfd5e08989993076b" exitCode=0 Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.890946 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.890965 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"eacbdff310c730327d8f04e45b6234aaef1ab91e11b24e8dfd5e08989993076b"} Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.891797 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.892048 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.892080 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.892092 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.892892 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.892944 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.892964 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.893381 4651 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="dba40c78d42cfd10414df43e27860b89083b6988bc0f0375c286af81e8a7ef32" exitCode=0 Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.893423 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"dba40c78d42cfd10414df43e27860b89083b6988bc0f0375c286af81e8a7ef32"} Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.893441 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.894295 4651 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.894309 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.894317 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.896629 4651 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="96c869e21e725921f47799368a3327f628bbdd7d7db8b4d0f29bf27b4d04551b" exitCode=0 Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.896662 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"96c869e21e725921f47799368a3327f628bbdd7d7db8b4d0f29bf27b4d04551b"} Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.896753 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.898003 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.898031 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:21 crc kubenswrapper[4651]: I1011 04:51:21.898042 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:22 crc kubenswrapper[4651]: W1011 04:51:22.304300 4651 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused Oct 11 04:51:22 crc kubenswrapper[4651]: E1011 04:51:22.304365 4651 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.182:6443: connect: connection refused" logger="UnhandledError" Oct 11 04:51:22 crc kubenswrapper[4651]: I1011 04:51:22.374327 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 11 04:51:22 crc kubenswrapper[4651]: I1011 04:51:22.385267 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 11 04:51:22 crc kubenswrapper[4651]: I1011 04:51:22.783637 4651 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused Oct 11 04:51:22 crc kubenswrapper[4651]: E1011 04:51:22.795789 4651 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.182:6443: connect: connection refused" interval="3.2s" Oct 11 04:51:22 crc kubenswrapper[4651]: I1011 04:51:22.904868 4651 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"c0795ced06735c12d7560b7563d12ebbda3afb26aab35978a6f8b42216eeb1cf"} Oct 11 04:51:22 crc kubenswrapper[4651]: I1011 04:51:22.904900 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:22 crc kubenswrapper[4651]: I1011 04:51:22.905960 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:22 crc kubenswrapper[4651]: I1011 04:51:22.906043 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:22 crc kubenswrapper[4651]: I1011 04:51:22.906066 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:22 crc kubenswrapper[4651]: I1011 04:51:22.914924 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:22 crc kubenswrapper[4651]: I1011 04:51:22.914936 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"2aaeb6d47f2435f2865c7516d976fecaf6de20b458b5cdcea1cdf59449cdef9d"} Oct 11 04:51:22 crc kubenswrapper[4651]: I1011 04:51:22.915013 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"bf6183b5381ec04f62d32175471097bd2d2088003063202375b88ccfb9080fae"} Oct 11 04:51:22 crc kubenswrapper[4651]: I1011 04:51:22.915033 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"918c77155c7ad4e14a9706e6e36a26cf2c774133b3435468d326b1b8c1f29f45"} Oct 11 04:51:22 crc kubenswrapper[4651]: I1011 04:51:22.916116 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:22 crc kubenswrapper[4651]: I1011 04:51:22.916159 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:22 crc kubenswrapper[4651]: I1011 04:51:22.916168 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:22 crc kubenswrapper[4651]: I1011 04:51:22.919436 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1"} Oct 11 04:51:22 crc kubenswrapper[4651]: I1011 04:51:22.919481 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1"} Oct 11 04:51:22 crc kubenswrapper[4651]: I1011 04:51:22.919500 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e6fdcb798e11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940"} Oct 11 04:51:22 crc 
kubenswrapper[4651]: I1011 04:51:22.919519 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb"} Oct 11 04:51:22 crc kubenswrapper[4651]: I1011 04:51:22.919536 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942"} Oct 11 04:51:22 crc kubenswrapper[4651]: I1011 04:51:22.919689 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:22 crc kubenswrapper[4651]: I1011 04:51:22.920811 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:22 crc kubenswrapper[4651]: I1011 04:51:22.920876 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:22 crc kubenswrapper[4651]: I1011 04:51:22.920893 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:22 crc kubenswrapper[4651]: I1011 04:51:22.923174 4651 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="0c016dfb1ee17c969a194814340ad0141733dd0fcf5134c906b9e1672545ada7" exitCode=0 Oct 11 04:51:22 crc kubenswrapper[4651]: I1011 04:51:22.923343 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:22 crc kubenswrapper[4651]: I1011 04:51:22.924083 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:22 crc kubenswrapper[4651]: I1011 04:51:22.924597 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"0c016dfb1ee17c969a194814340ad0141733dd0fcf5134c906b9e1672545ada7"} Oct 11 04:51:22 crc kubenswrapper[4651]: I1011 04:51:22.924642 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 11 04:51:22 crc kubenswrapper[4651]: I1011 04:51:22.925139 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:22 crc kubenswrapper[4651]: I1011 04:51:22.925172 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:22 crc kubenswrapper[4651]: I1011 04:51:22.925190 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:22 crc kubenswrapper[4651]: I1011 04:51:22.926016 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:22 crc kubenswrapper[4651]: I1011 04:51:22.926048 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:22 crc kubenswrapper[4651]: I1011 04:51:22.926064 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:23 crc kubenswrapper[4651]: W1011 04:51:23.010523 4651 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list 
*v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.182:6443: connect: connection refused Oct 11 04:51:23 crc kubenswrapper[4651]: E1011 04:51:23.010583 4651 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.182:6443: connect: connection refused" logger="UnhandledError" Oct 11 04:51:23 crc kubenswrapper[4651]: I1011 04:51:23.057934 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:23 crc kubenswrapper[4651]: I1011 04:51:23.059060 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:23 crc kubenswrapper[4651]: I1011 04:51:23.059105 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:23 crc kubenswrapper[4651]: I1011 04:51:23.059119 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:23 crc kubenswrapper[4651]: I1011 04:51:23.059148 4651 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 11 04:51:23 crc kubenswrapper[4651]: E1011 04:51:23.059656 4651 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.182:6443: connect: connection refused" node="crc" Oct 11 04:51:23 crc kubenswrapper[4651]: E1011 04:51:23.171790 4651 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.182:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186d56a136d2a560 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-11 04:51:19.78122992 +0000 UTC m=+0.677462746,LastTimestamp:2025-10-11 04:51:19.78122992 +0000 UTC m=+0.677462746,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Oct 11 04:51:23 crc kubenswrapper[4651]: I1011 04:51:23.523026 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 11 04:51:23 crc kubenswrapper[4651]: I1011 04:51:23.654153 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 11 04:51:23 crc kubenswrapper[4651]: I1011 04:51:23.928398 4651 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="2769e1d46f0c5e6c439b546ded9782fb5f911c76a603d9ab088ba97dab11aa94" exitCode=0 Oct 11 04:51:23 crc kubenswrapper[4651]: I1011 04:51:23.928509 4651 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 11 04:51:23 crc kubenswrapper[4651]: I1011 04:51:23.928565 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:23 crc kubenswrapper[4651]: I1011 04:51:23.928609 4651 
kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:23 crc kubenswrapper[4651]: I1011 04:51:23.928994 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"2769e1d46f0c5e6c439b546ded9782fb5f911c76a603d9ab088ba97dab11aa94"} Oct 11 04:51:23 crc kubenswrapper[4651]: I1011 04:51:23.929064 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:23 crc kubenswrapper[4651]: I1011 04:51:23.929460 4651 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 11 04:51:23 crc kubenswrapper[4651]: I1011 04:51:23.929479 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:23 crc kubenswrapper[4651]: I1011 04:51:23.929507 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:23 crc kubenswrapper[4651]: I1011 04:51:23.930046 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:23 crc kubenswrapper[4651]: I1011 04:51:23.930095 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:23 crc kubenswrapper[4651]: I1011 04:51:23.930114 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:23 crc kubenswrapper[4651]: I1011 04:51:23.930481 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:23 crc kubenswrapper[4651]: I1011 04:51:23.930535 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:23 crc kubenswrapper[4651]: I1011 04:51:23.930558 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:23 crc kubenswrapper[4651]: I1011 04:51:23.930605 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:23 crc kubenswrapper[4651]: I1011 04:51:23.930627 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:23 crc kubenswrapper[4651]: I1011 04:51:23.930635 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:23 crc kubenswrapper[4651]: I1011 04:51:23.930652 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:23 crc kubenswrapper[4651]: I1011 04:51:23.930694 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:23 crc kubenswrapper[4651]: I1011 04:51:23.930719 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:23 crc kubenswrapper[4651]: I1011 04:51:23.931406 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:23 crc kubenswrapper[4651]: I1011 04:51:23.931459 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:23 crc kubenswrapper[4651]: I1011 04:51:23.931486 4651 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:24 crc kubenswrapper[4651]: I1011 04:51:24.012727 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 11 04:51:24 crc kubenswrapper[4651]: I1011 04:51:24.939437 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b039fb16d5f290b9d83bfab99a513b49e03c62a06b7883a86ee52f8693dc843b"} Oct 11 04:51:24 crc kubenswrapper[4651]: I1011 04:51:24.939487 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"3bb0c1412db5c7d621d713947f0f4feadb1c3832f87c2e7b4179ee026869fbdd"} Oct 11 04:51:24 crc kubenswrapper[4651]: I1011 04:51:24.939503 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"36a6ea1f92c4b58aefe55b7a1b35f9ea1d05ee3d70d26cc244d10defa42c97c5"} Oct 11 04:51:24 crc kubenswrapper[4651]: I1011 04:51:24.939515 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:24 crc kubenswrapper[4651]: I1011 04:51:24.939516 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"a77af365af6edc727a5006cdab838118dee1c2d524cf7596eae53a06a450e998"} Oct 11 04:51:24 crc kubenswrapper[4651]: I1011 04:51:24.940239 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:24 crc kubenswrapper[4651]: I1011 04:51:24.940343 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:24 crc kubenswrapper[4651]: I1011 04:51:24.940406 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:24 crc kubenswrapper[4651]: I1011 04:51:24.940425 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:24 crc kubenswrapper[4651]: I1011 04:51:24.941597 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:24 crc kubenswrapper[4651]: I1011 04:51:24.941632 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:24 crc kubenswrapper[4651]: I1011 04:51:24.941644 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:25 crc kubenswrapper[4651]: I1011 04:51:25.642538 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 11 04:51:25 crc kubenswrapper[4651]: I1011 04:51:25.951170 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"e8ea66ca56ff1ac6e2d040467173b103ad3cc124350e7a38638925469650ec33"} Oct 11 04:51:25 crc kubenswrapper[4651]: I1011 04:51:25.951289 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:25 crc kubenswrapper[4651]: I1011 04:51:25.951336 4651 kubelet_node_status.go:401] "Setting node annotation to 
enable volume controller attach/detach" Oct 11 04:51:25 crc kubenswrapper[4651]: I1011 04:51:25.952915 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:25 crc kubenswrapper[4651]: I1011 04:51:25.952963 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:25 crc kubenswrapper[4651]: I1011 04:51:25.952985 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:25 crc kubenswrapper[4651]: I1011 04:51:25.952986 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:25 crc kubenswrapper[4651]: I1011 04:51:25.953136 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:25 crc kubenswrapper[4651]: I1011 04:51:25.953166 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:26 crc kubenswrapper[4651]: I1011 04:51:26.260078 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:26 crc kubenswrapper[4651]: I1011 04:51:26.261551 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:26 crc kubenswrapper[4651]: I1011 04:51:26.261580 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:26 crc kubenswrapper[4651]: I1011 04:51:26.261589 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:26 crc kubenswrapper[4651]: I1011 04:51:26.261608 4651 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 11 04:51:26 crc kubenswrapper[4651]: I1011 04:51:26.523552 4651 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 11 04:51:26 crc kubenswrapper[4651]: I1011 04:51:26.523651 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 11 04:51:26 crc kubenswrapper[4651]: I1011 04:51:26.625106 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 11 04:51:26 crc kubenswrapper[4651]: I1011 04:51:26.625333 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:26 crc kubenswrapper[4651]: I1011 04:51:26.626702 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:26 crc kubenswrapper[4651]: I1011 04:51:26.626788 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:26 crc kubenswrapper[4651]: I1011 04:51:26.626813 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 11 04:51:26 crc kubenswrapper[4651]: I1011 04:51:26.851850 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Oct 11 04:51:26 crc kubenswrapper[4651]: I1011 04:51:26.953901 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:26 crc kubenswrapper[4651]: I1011 04:51:26.953976 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:26 crc kubenswrapper[4651]: I1011 04:51:26.955297 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:26 crc kubenswrapper[4651]: I1011 04:51:26.955334 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:26 crc kubenswrapper[4651]: I1011 04:51:26.955408 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:26 crc kubenswrapper[4651]: I1011 04:51:26.955428 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:26 crc kubenswrapper[4651]: I1011 04:51:26.955350 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:26 crc kubenswrapper[4651]: I1011 04:51:26.955552 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:27 crc kubenswrapper[4651]: I1011 04:51:27.956459 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:27 crc kubenswrapper[4651]: I1011 04:51:27.957556 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:27 crc kubenswrapper[4651]: I1011 04:51:27.957615 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:27 crc kubenswrapper[4651]: I1011 04:51:27.957639 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:29 crc kubenswrapper[4651]: I1011 04:51:29.594765 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 11 04:51:29 crc kubenswrapper[4651]: I1011 04:51:29.595055 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:29 crc kubenswrapper[4651]: I1011 04:51:29.596493 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:29 crc kubenswrapper[4651]: I1011 04:51:29.596551 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:29 crc kubenswrapper[4651]: I1011 04:51:29.596564 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:29 crc kubenswrapper[4651]: E1011 04:51:29.962062 4651 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 11 04:51:33 crc kubenswrapper[4651]: W1011 04:51:33.431707 4651 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get 
"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout Oct 11 04:51:33 crc kubenswrapper[4651]: I1011 04:51:33.431804 4651 trace.go:236] Trace[303844569]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (11-Oct-2025 04:51:23.430) (total time: 10001ms): Oct 11 04:51:33 crc kubenswrapper[4651]: Trace[303844569]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (04:51:33.431) Oct 11 04:51:33 crc kubenswrapper[4651]: Trace[303844569]: [10.001403907s] [10.001403907s] END Oct 11 04:51:33 crc kubenswrapper[4651]: E1011 04:51:33.431848 4651 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Oct 11 04:51:33 crc kubenswrapper[4651]: I1011 04:51:33.654349 4651 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="Get \"https://192.168.126.11:6443/livez\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 11 04:51:33 crc kubenswrapper[4651]: I1011 04:51:33.654437 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="Get \"https://192.168.126.11:6443/livez\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 11 04:51:33 crc kubenswrapper[4651]: W1011 04:51:33.674103 4651 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout Oct 11 04:51:33 crc kubenswrapper[4651]: I1011 04:51:33.674388 4651 trace.go:236] Trace[2034997622]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (11-Oct-2025 04:51:23.672) (total time: 10001ms): Oct 11 04:51:33 crc kubenswrapper[4651]: Trace[2034997622]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (04:51:33.674) Oct 11 04:51:33 crc kubenswrapper[4651]: Trace[2034997622]: [10.001779896s] [10.001779896s] END Oct 11 04:51:33 crc kubenswrapper[4651]: E1011 04:51:33.674661 4651 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Oct 11 04:51:33 crc kubenswrapper[4651]: I1011 04:51:33.785201 4651 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Oct 11 04:51:33 crc kubenswrapper[4651]: I1011 04:51:33.973737 4651 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 11 04:51:33 crc kubenswrapper[4651]: I1011 04:51:33.976325 4651 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1" exitCode=255 Oct 11 04:51:33 crc kubenswrapper[4651]: I1011 04:51:33.976370 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1"} Oct 11 04:51:33 crc kubenswrapper[4651]: I1011 04:51:33.976522 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:33 crc kubenswrapper[4651]: I1011 04:51:33.977484 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:33 crc kubenswrapper[4651]: I1011 04:51:33.977540 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:33 crc kubenswrapper[4651]: I1011 04:51:33.977558 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:33 crc kubenswrapper[4651]: I1011 04:51:33.978387 4651 scope.go:117] "RemoveContainer" containerID="5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1" Oct 11 04:51:34 crc kubenswrapper[4651]: I1011 04:51:34.137155 4651 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Oct 11 04:51:34 crc kubenswrapper[4651]: I1011 04:51:34.137211 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Oct 11 04:51:34 crc kubenswrapper[4651]: I1011 04:51:34.981698 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 11 04:51:34 crc kubenswrapper[4651]: I1011 04:51:34.983632 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e"} Oct 11 04:51:34 crc kubenswrapper[4651]: I1011 04:51:34.983788 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:34 crc kubenswrapper[4651]: I1011 04:51:34.984710 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:34 crc kubenswrapper[4651]: I1011 04:51:34.984779 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:34 crc kubenswrapper[4651]: I1011 04:51:34.984802 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 11 04:51:35 crc kubenswrapper[4651]: I1011 04:51:35.668067 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Oct 11 04:51:35 crc kubenswrapper[4651]: I1011 04:51:35.668315 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:35 crc kubenswrapper[4651]: I1011 04:51:35.669572 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:35 crc kubenswrapper[4651]: I1011 04:51:35.669629 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:35 crc kubenswrapper[4651]: I1011 04:51:35.669647 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:35 crc kubenswrapper[4651]: I1011 04:51:35.701000 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Oct 11 04:51:35 crc kubenswrapper[4651]: I1011 04:51:35.986810 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:35 crc kubenswrapper[4651]: I1011 04:51:35.988144 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:35 crc kubenswrapper[4651]: I1011 04:51:35.988192 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:35 crc kubenswrapper[4651]: I1011 04:51:35.988210 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:36 crc kubenswrapper[4651]: I1011 04:51:36.009097 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Oct 11 04:51:36 crc kubenswrapper[4651]: I1011 04:51:36.524263 4651 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 11 04:51:36 crc kubenswrapper[4651]: I1011 04:51:36.524382 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 11 04:51:36 crc kubenswrapper[4651]: I1011 04:51:36.989583 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:36 crc kubenswrapper[4651]: I1011 04:51:36.991029 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:36 crc kubenswrapper[4651]: I1011 04:51:36.991059 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:36 crc kubenswrapper[4651]: I1011 04:51:36.991072 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:37 crc kubenswrapper[4651]: I1011 04:51:37.459303 4651 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Oct 
11 04:51:38 crc kubenswrapper[4651]: I1011 04:51:38.659906 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 11 04:51:38 crc kubenswrapper[4651]: I1011 04:51:38.660104 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:38 crc kubenswrapper[4651]: I1011 04:51:38.660240 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 11 04:51:38 crc kubenswrapper[4651]: I1011 04:51:38.661463 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:38 crc kubenswrapper[4651]: I1011 04:51:38.661526 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:38 crc kubenswrapper[4651]: I1011 04:51:38.661565 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:38 crc kubenswrapper[4651]: I1011 04:51:38.667450 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 11 04:51:38 crc kubenswrapper[4651]: I1011 04:51:38.995471 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:38 crc kubenswrapper[4651]: I1011 04:51:38.996309 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:38 crc kubenswrapper[4651]: I1011 04:51:38.996345 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:38 crc kubenswrapper[4651]: I1011 04:51:38.996355 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.004683 4651 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Oct 11 04:51:39 crc kubenswrapper[4651]: E1011 04:51:39.138383 4651 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.139457 4651 trace.go:236] Trace[1722306677]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (11-Oct-2025 04:51:26.975) (total time: 12164ms): Oct 11 04:51:39 crc kubenswrapper[4651]: Trace[1722306677]: ---"Objects listed" error: 12163ms (04:51:39.139) Oct 11 04:51:39 crc kubenswrapper[4651]: Trace[1722306677]: [12.164018662s] [12.164018662s] END Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.139483 4651 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.140719 4651 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.140850 4651 trace.go:236] Trace[1523462935]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (11-Oct-2025 04:51:28.634) (total time: 10506ms): Oct 11 04:51:39 crc kubenswrapper[4651]: Trace[1523462935]: ---"Objects listed" error: 10506ms (04:51:39.140) Oct 11 04:51:39 crc kubenswrapper[4651]: Trace[1523462935]: [10.50642345s] [10.50642345s] END Oct 11 04:51:39 crc 
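
[Annotation] "Failed to ensure lease exists, will retry" above means the kubelet could not renew its Lease object in kube-node-lease before the 10s client timeout, so node heartbeating stalls. A small client-go sketch that reads that lease to inspect the holder and last renew time; the kubeconfig path via KUBECONFIG is an assumption, the node name "crc" comes from the log.

package main

import (
	"context"
	"fmt"
	"os"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", os.Getenv("KUBECONFIG"))
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	// The kubelet renews this object periodically; a stale RenewTime is what
	// the "will retry ... interval=6.4s" entries above are working to fix.
	lease, err := cs.CoordinationV1().Leases("kube-node-lease").Get(context.TODO(), "crc", metav1.GetOptions{})
	if err != nil {
		panic(err)
	}
	if lease.Spec.HolderIdentity != nil {
		fmt.Println("holder:", *lease.Spec.HolderIdentity)
	}
	fmt.Println("last renew:", lease.Spec.RenewTime)
}
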
kubenswrapper[4651]: I1011 04:51:39.140884 4651 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Oct 11 04:51:39 crc kubenswrapper[4651]: E1011 04:51:39.141551 4651 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.779191 4651 apiserver.go:52] "Watching apiserver" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.784533 4651 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.784851 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-phsgk","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-dns/node-resolver-kwhmr","openshift-machine-config-operator/machine-config-daemon-78jnv","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-operator/iptables-alerter-4ln5h"] Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.785211 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.785278 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:51:39 crc kubenswrapper[4651]: E1011 04:51:39.785327 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.785334 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.785388 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.785514 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.785550 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-phsgk" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.785610 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.785669 4651 util.go:30] "No sandbox for pod can be found. 
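
[Annotation] "Watching apiserver" followed by the SyncLoop ADD with source="api" is the kubelet receiving its pod set from the API server, already filtered to pods bound to this node. A sketch of the equivalent query done externally with a field selector; client setup is the same assumed-KUBECONFIG pattern as in the previous sketch.

package main

import (
	"context"
	"fmt"
	"os"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", os.Getenv("KUBECONFIG"))
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	// Same server-side filter the kubelet's apiserver pod source uses:
	// only pods whose spec.nodeName is this node ("crc", from the log).
	pods, err := cs.CoreV1().Pods(metav1.NamespaceAll).List(context.TODO(),
		metav1.ListOptions{FieldSelector: "spec.nodeName=crc"})
	if err != nil {
		panic(err)
	}
	for _, p := range pods.Items {
		fmt.Println(p.Namespace + "/" + p.Name)
	}
}
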
Need to start a new one" pod="openshift-dns/node-resolver-kwhmr" Oct 11 04:51:39 crc kubenswrapper[4651]: E1011 04:51:39.785767 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.785962 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" Oct 11 04:51:39 crc kubenswrapper[4651]: E1011 04:51:39.786042 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.787561 4651 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.788172 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.788571 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.788703 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.789020 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.789048 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.789066 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.789228 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.789320 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.789430 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.789375 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.789932 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.789998 4651 reflector.go:368] Caches populated for *v1.ConfigMap from 
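
[Annotation] The recurring "NetworkPluginNotReady ... no CNI configuration file in /etc/kubernetes/cni/net.d/" errors above persist until the network operator's CNI plugin writes a config into that directory. A simplified check of the directory; the authoritative test lives in the container runtime's CNI layer (ocicni), so this is only an approximation of what "network not ready" means here.

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	const dir = "/etc/kubernetes/cni/net.d" // path taken from the log
	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Println("cannot read CNI conf dir:", err)
		return
	}
	found := false
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json": // config extensions the CNI layer accepts
			fmt.Println("CNI config present:", e.Name())
			found = true
		}
	}
	if !found {
		fmt.Println("no CNI configuration file; pod sandboxes stay blocked")
	}
}
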
object-"openshift-image-registry"/"kube-root-ca.crt" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.789935 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.790025 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.790193 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.790228 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.790809 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.790890 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.790949 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.793952 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.793970 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.802360 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.812164 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
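
[Annotation] The "Failed to update status for pod" entries above show the kubelet PATCHing the pod status subresource with a strategic-merge body (the escaped JSON in the message) and the API server rejecting it because the pod.network-node-identity.openshift.io admission webhook is unreachable. A client-go sketch of the same kind of call; the patch body below is a trivial hypothetical, not the one from the log, and pod/namespace names are taken from the entries above.

package main

import (
	"context"
	"os"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/types"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", os.Getenv("KUBECONFIG"))
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	// Minimal hypothetical status patch; an admission webhook sits in front of
	// this write, which is why a dead webhook surfaces as "failed to patch status".
	patch := []byte(`{"status":{"conditions":[{"type":"Ready","status":"False"}]}}`)
	_, err = cs.CoreV1().Pods("openshift-network-diagnostics").Patch(
		context.TODO(), "network-check-target-xd92c",
		types.StrategicMergePatchType, patch, metav1.PatchOptions{}, "status")
	if err != nil {
		panic(err) // with the webhook down, this reproduces the error above
	}
}
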
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.820963 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.837229 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.844350 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.844391 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.844409 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.844427 4651 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.844443 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.844458 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.844476 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.844493 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.844507 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.844521 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.844543 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.844566 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.844611 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 11 04:51:39 crc 
kubenswrapper[4651]: I1011 04:51:39.844630 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.844645 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.844659 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.844677 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.844695 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.844710 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.844729 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.844744 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.844761 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.844775 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod 
\"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.844791 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.844843 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.844862 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.844878 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.844894 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.844911 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.844926 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.844941 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.844957 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.844975 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.844990 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845009 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845023 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845038 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845070 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845085 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845101 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845122 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845145 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845165 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845181 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845201 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845222 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845252 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845269 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845257 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845284 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845356 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845386 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845409 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845430 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845458 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845480 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845501 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845523 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845544 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: 
\"43509403-f426-496e-be36-56cef71462f5\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845568 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845593 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845656 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845680 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845702 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845724 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845746 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845768 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845789 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845810 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: 
\"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845853 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845879 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845902 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845910 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845924 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845947 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845968 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.845992 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.846013 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.846034 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" 
(UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.846022 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.846055 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.846062 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.846077 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.846099 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.846120 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.846145 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.846167 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.846189 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: 
\"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.846210 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.846220 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.846230 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.847151 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.847230 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.847424 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.847526 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.847623 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.847518 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.848167 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.848189 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.848276 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.848361 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.848417 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.848463 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.848605 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). 
InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.848793 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.848769 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.848861 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.848879 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.848916 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.848931 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.849023 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.849044 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.849049 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.849147 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.849169 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.849150 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.849260 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.849312 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.849383 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.849403 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.849427 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.849479 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.849655 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.849714 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.850035 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.850071 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.849741 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.850196 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.850312 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.850423 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.850527 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.850558 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.850675 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.850680 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). 
InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.849790 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.849875 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.849901 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: E1011 04:51:39.847662 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:51:40.34756568 +0000 UTC m=+21.243798486 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.850966 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.851000 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.851036 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.851065 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.851094 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.851119 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.851146 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.851176 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.851235 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: 
\"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.851266 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.851295 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.851323 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.851349 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.851376 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.851405 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.851436 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.851463 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.851491 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.851517 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.851543 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.851573 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.851602 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.851628 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.851655 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.851686 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.851710 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.851737 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.851764 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.851769 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.851794 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.851837 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.851898 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.851927 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.851951 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.851979 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.852006 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.852032 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.852063 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.852090 4651 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.852119 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.852143 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.852171 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.852239 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.852299 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.852333 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.852361 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.852381 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.852391 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.852629 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.852964 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.853018 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.853046 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.853073 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.849966 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.853101 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.853123 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.853148 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.853174 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.853200 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.853222 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.853247 4651 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.853275 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.853297 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.853325 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.853351 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.853386 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.853386 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.853411 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.853436 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.853461 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.853485 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.853510 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.853531 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.853555 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.853580 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.853600 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.854781 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.854830 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.854854 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.854874 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.854897 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.854921 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.854939 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.854960 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.854979 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.854997 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.855018 4651 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.855038 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.853643 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.853755 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.854020 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.850801 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.854196 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.854215 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.854561 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.854573 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.855310 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.854710 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.854859 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.855314 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.855136 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.855476 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). 
InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.855500 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.855640 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.855749 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.855864 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.855856 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.855960 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.856140 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.856193 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.856201 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.856242 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.856309 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.856520 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.856711 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.856770 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.856705 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.856795 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.856957 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.857270 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.857272 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.857426 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.857465 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.857514 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.857586 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.857564 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). 
InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.857650 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.857672 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.858476 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.858618 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.858934 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.858941 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.859035 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.859063 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). 
InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.859127 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.859426 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.859962 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.859989 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.860336 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.860482 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.856117 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.861284 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.861166 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.861378 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.861550 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.861577 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.861661 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.861681 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.861689 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.861685 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.861729 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.861768 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.861770 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.861794 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.861830 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.861854 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.861872 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.861893 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.861898 4651 operation_generator.go:803] UnmountVolume.TearDown 
succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.861909 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.861926 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.861943 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.861917 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.861952 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.861959 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.861993 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.862013 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.862029 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.862044 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.862061 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.862076 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.862126 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.862148 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.862166 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.862187 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.862249 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.862278 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.862353 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.862361 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.862379 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.862410 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.862465 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: E1011 04:51:39.862524 4651 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.862921 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: E1011 04:51:39.862957 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-11 04:51:40.362934809 +0000 UTC m=+21.259167625 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.862422 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863011 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/9e8fb74d-013d-4103-b029-e8416d079dcf-serviceca\") pod \"node-ca-phsgk\" (UID: \"9e8fb74d-013d-4103-b029-e8416d079dcf\") " pod="openshift-image-registry/node-ca-phsgk" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863040 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/519a1ae1-e964-48b0-8b61-835146df28c1-rootfs\") pod \"machine-config-daemon-78jnv\" (UID: \"519a1ae1-e964-48b0-8b61-835146df28c1\") " pod="openshift-machine-config-operator/machine-config-daemon-78jnv" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.862982 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863070 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863128 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863161 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863192 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4ftf8\" (UniqueName: \"kubernetes.io/projected/9e8fb74d-013d-4103-b029-e8416d079dcf-kube-api-access-4ftf8\") pod \"node-ca-phsgk\" (UID: \"9e8fb74d-013d-4103-b029-e8416d079dcf\") " pod="openshift-image-registry/node-ca-phsgk" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863219 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863237 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863244 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7n87\" (UniqueName: \"kubernetes.io/projected/dbcac3cb-b774-47ec-a86c-b22191d14d99-kube-api-access-z7n87\") pod \"node-resolver-kwhmr\" (UID: \"dbcac3cb-b774-47ec-a86c-b22191d14d99\") " pod="openshift-dns/node-resolver-kwhmr" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863324 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/519a1ae1-e964-48b0-8b61-835146df28c1-proxy-tls\") pod \"machine-config-daemon-78jnv\" (UID: \"519a1ae1-e964-48b0-8b61-835146df28c1\") " pod="openshift-machine-config-operator/machine-config-daemon-78jnv" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863351 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863377 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9e8fb74d-013d-4103-b029-e8416d079dcf-host\") pod \"node-ca-phsgk\" (UID: \"9e8fb74d-013d-4103-b029-e8416d079dcf\") " pod="openshift-image-registry/node-ca-phsgk" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863405 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863434 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863461 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/dbcac3cb-b774-47ec-a86c-b22191d14d99-hosts-file\") pod \"node-resolver-kwhmr\" (UID: \"dbcac3cb-b774-47ec-a86c-b22191d14d99\") " pod="openshift-dns/node-resolver-kwhmr" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863489 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863514 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: 
\"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863534 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863552 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863569 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/519a1ae1-e964-48b0-8b61-835146df28c1-mcd-auth-proxy-config\") pod \"machine-config-daemon-78jnv\" (UID: \"519a1ae1-e964-48b0-8b61-835146df28c1\") " pod="openshift-machine-config-operator/machine-config-daemon-78jnv" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863587 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wkk99\" (UniqueName: \"kubernetes.io/projected/519a1ae1-e964-48b0-8b61-835146df28c1-kube-api-access-wkk99\") pod \"machine-config-daemon-78jnv\" (UID: \"519a1ae1-e964-48b0-8b61-835146df28c1\") " pod="openshift-machine-config-operator/machine-config-daemon-78jnv" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863641 4651 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863653 4651 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863662 4651 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863672 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863685 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863693 4651 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath 
\"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863702 4651 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863710 4651 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863253 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863734 4651 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863758 4651 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863379 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863420 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863583 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863651 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863742 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.863791 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.864000 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.864016 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.864052 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.864059 4651 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.865006 4651 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.864067 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.865156 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.864122 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: E1011 04:51:39.864156 4651 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.864239 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.864901 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.865394 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.865449 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.865772 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.865377 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.865884 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 11 04:51:39 crc kubenswrapper[4651]: E1011 04:51:39.865895 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-11 04:51:40.3653591 +0000 UTC m=+21.261591896 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.865923 4651 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.865941 4651 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.865953 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.865959 4651 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.865990 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866003 4651 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866013 4651 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866022 4651 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866085 4651 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866095 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866104 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866113 4651 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866122 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866130 4651 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866139 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866148 4651 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 
crc kubenswrapper[4651]: I1011 04:51:39.866147 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866157 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866204 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866272 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866286 4651 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866332 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866346 4651 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866359 4651 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866374 4651 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866388 4651 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866402 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866415 4651 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866428 4651 
reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866444 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866445 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866456 4651 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866470 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866483 4651 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866496 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866509 4651 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866521 4651 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866533 4651 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866546 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866558 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866566 4651 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866574 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866600 4651 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866613 4651 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866623 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866634 4651 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866644 4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866658 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866662 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866670 4651 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866693 4651 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866708 4651 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866709 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866721 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866745 4651 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866756 4651 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866765 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866775 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866785 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866794 4651 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866803 4651 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866829 4651 reconciler_common.go:293] "Volume 
detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866838 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866846 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866855 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866863 4651 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866872 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866880 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866889 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866898 4651 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.867005 4651 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.867015 4651 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.867027 4651 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.867036 4651 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.867046 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: 
\"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.867083 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.867097 4651 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.867110 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.866754 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.867060 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.867059 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.867523 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.867484 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.867787 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868312 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868408 4651 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868418 4651 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868429 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868494 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868509 4651 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868519 4651 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868530 4651 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868539 4651 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868548 4651 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868557 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868567 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868577 4651 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868585 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868594 4651 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868603 4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868611 4651 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868620 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868629 4651 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868638 4651 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868646 4651 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868655 4651 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868664 4651 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868671 4651 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868680 4651 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868691 4651 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868701 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868710 4651 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868718 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868726 4651 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868734 4651 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868742 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868751 4651 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868760 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868768 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868776 4651 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868784 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868793 4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868801 4651 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: 
\"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.868901 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.869409 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.870335 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.870555 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.870708 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.870996 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.871203 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.872878 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.877019 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.877942 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.878562 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.879944 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.880550 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.880721 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.880970 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.882997 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.883117 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.883298 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.883539 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.883645 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.883659 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: E1011 04:51:39.883615 4651 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 11 04:51:39 crc kubenswrapper[4651]: E1011 04:51:39.883709 4651 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 11 04:51:39 crc kubenswrapper[4651]: E1011 04:51:39.883721 4651 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 04:51:39 crc kubenswrapper[4651]: E1011 04:51:39.883773 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-11 04:51:40.383757106 +0000 UTC m=+21.279989902 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.884100 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.883567 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.884840 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.884932 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.885035 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.887202 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.888421 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.888948 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.889447 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.889495 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.889559 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.889474 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.889863 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: E1011 04:51:39.889955 4651 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 11 04:51:39 crc kubenswrapper[4651]: E1011 04:51:39.889978 4651 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 11 04:51:39 crc kubenswrapper[4651]: E1011 04:51:39.889993 4651 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.890020 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: E1011 04:51:39.890048 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-11 04:51:40.390032085 +0000 UTC m=+21.286264951 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.890158 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.891106 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.893394 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.893545 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.893781 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.893919 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.894181 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.894133 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.894911 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.895893 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.896288 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.896335 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.896393 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.896728 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.897528 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.901323 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.901962 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.903456 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.904571 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.906979 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.907945 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.909082 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.910025 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.910914 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod 
"5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.911905 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.912481 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.914675 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.915306 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.917384 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.918072 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.918755 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.919204 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.920334 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.920701 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.923469 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.923805 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.924667 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.925235 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.925694 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.926182 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.927803 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.928214 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.929209 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.930080 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.931512 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.932128 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.933010 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.933699 4651 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" 
path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.934023 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.934173 
4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.939795 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.940634 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.942593 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.944564 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.945713 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.947530 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.948162 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.948409 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.951331 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.951939 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.953230 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.954530 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.955154 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.956028 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.956933 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.957789 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.958585 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.959284 4651 
status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.959400 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" 
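[annotation] The status patches embedded in the `Failed to update status for pod` records above appear with tripled backslashes because the patch is a quoted JSON string nested inside the quoted err field. Unquoting one level recovers readable JSON; a sketch on a shortened stand-in for one of the long patches above:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"strconv"
)

func main() {
	// Shortened stand-in for the patches above, as the text appears after
	// stripping the outer err="..." quoting from the raw line.
	escaped := `"{\"metadata\":{\"uid\":\"519a1ae1-e964-48b0-8b61-835146df28c1\"},\"status\":{\"conditions\":[{\"type\":\"Ready\",\"status\":\"False\"}]}}"`

	patch, err := strconv.Unquote(escaped) // strip one level of quoting
	if err != nil {
		panic(err)
	}
	var pretty bytes.Buffer
	if err := json.Indent(&pretty, []byte(patch), "", "  "); err != nil {
		panic(err)
	}
	fmt.Println(pretty.String())
}

[end annotation]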
path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.959847 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.960291 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.962558 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.963151 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.964046 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.968047 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.969859 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/9e8fb74d-013d-4103-b029-e8416d079dcf-serviceca\") pod \"node-ca-phsgk\" (UID: \"9e8fb74d-013d-4103-b029-e8416d079dcf\") " pod="openshift-image-registry/node-ca-phsgk" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.969887 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/519a1ae1-e964-48b0-8b61-835146df28c1-rootfs\") pod \"machine-config-daemon-78jnv\" (UID: \"519a1ae1-e964-48b0-8b61-835146df28c1\") " pod="openshift-machine-config-operator/machine-config-daemon-78jnv" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.969928 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4ftf8\" (UniqueName: \"kubernetes.io/projected/9e8fb74d-013d-4103-b029-e8416d079dcf-kube-api-access-4ftf8\") pod \"node-ca-phsgk\" (UID: \"9e8fb74d-013d-4103-b029-e8416d079dcf\") " pod="openshift-image-registry/node-ca-phsgk" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.969950 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7n87\" (UniqueName: \"kubernetes.io/projected/dbcac3cb-b774-47ec-a86c-b22191d14d99-kube-api-access-z7n87\") pod \"node-resolver-kwhmr\" (UID: \"dbcac3cb-b774-47ec-a86c-b22191d14d99\") " pod="openshift-dns/node-resolver-kwhmr" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.969966 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/519a1ae1-e964-48b0-8b61-835146df28c1-proxy-tls\") pod \"machine-config-daemon-78jnv\" (UID: \"519a1ae1-e964-48b0-8b61-835146df28c1\") " pod="openshift-machine-config-operator/machine-config-daemon-78jnv" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.969989 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9e8fb74d-013d-4103-b029-e8416d079dcf-host\") pod 
\"node-ca-phsgk\" (UID: \"9e8fb74d-013d-4103-b029-e8416d079dcf\") " pod="openshift-image-registry/node-ca-phsgk" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.970032 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/dbcac3cb-b774-47ec-a86c-b22191d14d99-hosts-file\") pod \"node-resolver-kwhmr\" (UID: \"dbcac3cb-b774-47ec-a86c-b22191d14d99\") " pod="openshift-dns/node-resolver-kwhmr" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.970080 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.970106 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/519a1ae1-e964-48b0-8b61-835146df28c1-mcd-auth-proxy-config\") pod \"machine-config-daemon-78jnv\" (UID: \"519a1ae1-e964-48b0-8b61-835146df28c1\") " pod="openshift-machine-config-operator/machine-config-daemon-78jnv" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.970121 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wkk99\" (UniqueName: \"kubernetes.io/projected/519a1ae1-e964-48b0-8b61-835146df28c1-kube-api-access-wkk99\") pod \"machine-config-daemon-78jnv\" (UID: \"519a1ae1-e964-48b0-8b61-835146df28c1\") " pod="openshift-machine-config-operator/machine-config-daemon-78jnv" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.970144 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.970183 4651 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.970193 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.970203 4651 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.970211 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.970220 4651 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.970230 4651 
reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.970238 4651 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.970246 4651 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.970254 4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.970264 4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.970272 4651 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.970280 4651 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.970289 4651 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.970297 4651 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.970305 4651 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.970314 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.970322 4651 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.970331 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.970340 4651 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" 
(UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.970349 4651 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.970357 4651 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.970366 4651 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.970375 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.970384 4651 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.970392 4651 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.970401 4651 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.970478 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.970513 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9e8fb74d-013d-4103-b029-e8416d079dcf-host\") pod \"node-ca-phsgk\" (UID: \"9e8fb74d-013d-4103-b029-e8416d079dcf\") " pod="openshift-image-registry/node-ca-phsgk" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.970549 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/dbcac3cb-b774-47ec-a86c-b22191d14d99-hosts-file\") pod \"node-resolver-kwhmr\" (UID: \"dbcac3cb-b774-47ec-a86c-b22191d14d99\") " pod="openshift-dns/node-resolver-kwhmr" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.971703 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/9e8fb74d-013d-4103-b029-e8416d079dcf-serviceca\") pod \"node-ca-phsgk\" (UID: \"9e8fb74d-013d-4103-b029-e8416d079dcf\") " 
pod="openshift-image-registry/node-ca-phsgk" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.971845 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/519a1ae1-e964-48b0-8b61-835146df28c1-rootfs\") pod \"machine-config-daemon-78jnv\" (UID: \"519a1ae1-e964-48b0-8b61-835146df28c1\") " pod="openshift-machine-config-operator/machine-config-daemon-78jnv" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.972179 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/519a1ae1-e964-48b0-8b61-835146df28c1-mcd-auth-proxy-config\") pod \"machine-config-daemon-78jnv\" (UID: \"519a1ae1-e964-48b0-8b61-835146df28c1\") " pod="openshift-machine-config-operator/machine-config-daemon-78jnv" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.972679 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.972703 4651 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.972711 4651 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973191 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973345 4651 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973365 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973376 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973389 4651 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973402 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973414 4651 
reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973426 4651 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973438 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973448 4651 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973458 4651 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973468 4651 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973477 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973485 4651 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973493 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973501 4651 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973509 4651 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973517 4651 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973526 4651 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 
04:51:39.973534 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973543 4651 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973551 4651 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973559 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973568 4651 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973576 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973584 4651 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973592 4651 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973600 4651 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973609 4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973617 4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973626 4651 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973635 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 
04:51:39.973643 4651 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973651 4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973660 4651 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973669 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973697 4651 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973705 4651 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973713 4651 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973722 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973730 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.973738 4651 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.987582 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/519a1ae1-e964-48b0-8b61-835146df28c1-proxy-tls\") pod \"machine-config-daemon-78jnv\" (UID: \"519a1ae1-e964-48b0-8b61-835146df28c1\") " pod="openshift-machine-config-operator/machine-config-daemon-78jnv" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.989099 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.998254 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4ftf8\" (UniqueName: \"kubernetes.io/projected/9e8fb74d-013d-4103-b029-e8416d079dcf-kube-api-access-4ftf8\") pod \"node-ca-phsgk\" (UID: \"9e8fb74d-013d-4103-b029-e8416d079dcf\") " pod="openshift-image-registry/node-ca-phsgk" Oct 11 04:51:39 crc kubenswrapper[4651]: I1011 04:51:39.999263 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7n87\" (UniqueName: \"kubernetes.io/projected/dbcac3cb-b774-47ec-a86c-b22191d14d99-kube-api-access-z7n87\") pod \"node-resolver-kwhmr\" (UID: \"dbcac3cb-b774-47ec-a86c-b22191d14d99\") " pod="openshift-dns/node-resolver-kwhmr" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.001050 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wkk99\" (UniqueName: \"kubernetes.io/projected/519a1ae1-e964-48b0-8b61-835146df28c1-kube-api-access-wkk99\") pod \"machine-config-daemon-78jnv\" (UID: \"519a1ae1-e964-48b0-8b61-835146df28c1\") " pod="openshift-machine-config-operator/machine-config-daemon-78jnv" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.004258 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.013304 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.023602 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.033091 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.044846 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.055686 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-wz4hw"] Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.056328 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-wz4hw" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.058060 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-6zt9s"] Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.058513 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.058731 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.058989 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.058991 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.059414 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-pgwvb"] Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.059754 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.061137 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.062359 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.062494 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.062608 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.062833 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.062964 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.063225 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.063392 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.063495 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.063433 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.065345 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.067982 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.074040 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.086619 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.093772 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.095910 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.103217 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.103294 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:40 crc kubenswrapper[4651]: W1011 04:51:40.107412 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-31c08851de25529aa13935b430e40df1f8c5951d689031cd079f0e8a12eff696 WatchSource:0}: Error finding container 31c08851de25529aa13935b430e40df1f8c5951d689031cd079f0e8a12eff696: Status 404 returned error can't find the container with id 31c08851de25529aa13935b430e40df1f8c5951d689031cd079f0e8a12eff696 Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.108892 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-phsgk" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.115171 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.117633 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.120472 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.125353 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.129852 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.135751 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-kwhmr" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.144569 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.164092 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.174739 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.175205 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-slash\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.175299 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-ovnkube-config\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.175398 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/215170a8-84a9-4e15-9b0e-c1200c680f30-cni-binary-copy\") pod \"multus-additional-cni-plugins-pgwvb\" (UID: \"215170a8-84a9-4e15-9b0e-c1200c680f30\") " pod="openshift-multus/multus-additional-cni-plugins-pgwvb" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.175467 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-hostroot\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.175536 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: 
\"kubernetes.io/configmap/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-multus-daemon-config\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.175634 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/215170a8-84a9-4e15-9b0e-c1200c680f30-os-release\") pod \"multus-additional-cni-plugins-pgwvb\" (UID: \"215170a8-84a9-4e15-9b0e-c1200c680f30\") " pod="openshift-multus/multus-additional-cni-plugins-pgwvb" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.175718 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-cni-netd\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.175796 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-multus-conf-dir\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.175897 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-node-log\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.175994 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-host-var-lib-kubelet\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.176084 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-var-lib-openvswitch\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.176216 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/215170a8-84a9-4e15-9b0e-c1200c680f30-system-cni-dir\") pod \"multus-additional-cni-plugins-pgwvb\" (UID: \"215170a8-84a9-4e15-9b0e-c1200c680f30\") " pod="openshift-multus/multus-additional-cni-plugins-pgwvb" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.176266 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-cni-bin\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.176532 4651 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9tctk\" (UniqueName: \"kubernetes.io/projected/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-kube-api-access-9tctk\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.176689 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-system-cni-dir\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.176787 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-host-var-lib-cni-bin\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.176885 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c96nx\" (UniqueName: \"kubernetes.io/projected/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-kube-api-access-c96nx\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.176997 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/215170a8-84a9-4e15-9b0e-c1200c680f30-cnibin\") pod \"multus-additional-cni-plugins-pgwvb\" (UID: \"215170a8-84a9-4e15-9b0e-c1200c680f30\") " pod="openshift-multus/multus-additional-cni-plugins-pgwvb" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.177035 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-cni-binary-copy\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.177059 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/215170a8-84a9-4e15-9b0e-c1200c680f30-tuning-conf-dir\") pod \"multus-additional-cni-plugins-pgwvb\" (UID: \"215170a8-84a9-4e15-9b0e-c1200c680f30\") " pod="openshift-multus/multus-additional-cni-plugins-pgwvb" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.177081 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-run-ovn\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.177104 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-os-release\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.177125 
4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-multus-cni-dir\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.177148 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52sdp\" (UniqueName: \"kubernetes.io/projected/215170a8-84a9-4e15-9b0e-c1200c680f30-kube-api-access-52sdp\") pod \"multus-additional-cni-plugins-pgwvb\" (UID: \"215170a8-84a9-4e15-9b0e-c1200c680f30\") " pod="openshift-multus/multus-additional-cni-plugins-pgwvb" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.177170 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-kubelet\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.177192 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-run-netns\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.177269 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-host-var-lib-cni-multus\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.177328 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-host-run-multus-certs\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.177352 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-etc-openvswitch\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.177406 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/215170a8-84a9-4e15-9b0e-c1200c680f30-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-pgwvb\" (UID: \"215170a8-84a9-4e15-9b0e-c1200c680f30\") " pod="openshift-multus/multus-additional-cni-plugins-pgwvb" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.177433 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-run-openvswitch\") pod \"ovnkube-node-6zt9s\" (UID: 
\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.177459 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-cnibin\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.177486 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-run-systemd\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.177508 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-etc-kubernetes\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.177539 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.177565 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-multus-socket-dir-parent\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.177595 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-run-ovn-kubernetes\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.177617 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-host-run-netns\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.177639 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-systemd-units\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.177663 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: 
\"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-log-socket\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.177696 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-ovn-node-metrics-cert\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.177717 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-ovnkube-script-lib\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.177771 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-host-run-k8s-cni-cncf-io\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.177851 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-env-overrides\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.185615 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.197379 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.217055 4651 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\
\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b1
7b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.234446 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.247036 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.257506 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.276051 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278186 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-hostroot\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278221 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-multus-daemon-config\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278246 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/215170a8-84a9-4e15-9b0e-c1200c680f30-os-release\") pod \"multus-additional-cni-plugins-pgwvb\" (UID: \"215170a8-84a9-4e15-9b0e-c1200c680f30\") " pod="openshift-multus/multus-additional-cni-plugins-pgwvb" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278270 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-cni-netd\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" Oct 11 04:51:40 crc 
kubenswrapper[4651]: I1011 04:51:40.278286 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-multus-conf-dir\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278299 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-node-log\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278314 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-host-var-lib-kubelet\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278311 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-hostroot\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278328 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-var-lib-openvswitch\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278342 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/215170a8-84a9-4e15-9b0e-c1200c680f30-system-cni-dir\") pod \"multus-additional-cni-plugins-pgwvb\" (UID: \"215170a8-84a9-4e15-9b0e-c1200c680f30\") " pod="openshift-multus/multus-additional-cni-plugins-pgwvb"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278356 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-cni-bin\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278371 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9tctk\" (UniqueName: \"kubernetes.io/projected/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-kube-api-access-9tctk\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278386 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-system-cni-dir\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278399 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-host-var-lib-cni-bin\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278415 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c96nx\" (UniqueName: \"kubernetes.io/projected/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-kube-api-access-c96nx\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278440 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/215170a8-84a9-4e15-9b0e-c1200c680f30-cnibin\") pod \"multus-additional-cni-plugins-pgwvb\" (UID: \"215170a8-84a9-4e15-9b0e-c1200c680f30\") " pod="openshift-multus/multus-additional-cni-plugins-pgwvb"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278454 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-cni-binary-copy\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278468 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/215170a8-84a9-4e15-9b0e-c1200c680f30-tuning-conf-dir\") pod \"multus-additional-cni-plugins-pgwvb\" (UID: \"215170a8-84a9-4e15-9b0e-c1200c680f30\") " pod="openshift-multus/multus-additional-cni-plugins-pgwvb"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278482 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-run-ovn\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278499 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-os-release\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278515 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-multus-cni-dir\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278530 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52sdp\" (UniqueName: \"kubernetes.io/projected/215170a8-84a9-4e15-9b0e-c1200c680f30-kube-api-access-52sdp\") pod \"multus-additional-cni-plugins-pgwvb\" (UID: \"215170a8-84a9-4e15-9b0e-c1200c680f30\") " pod="openshift-multus/multus-additional-cni-plugins-pgwvb"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278545 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-kubelet\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278560 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-run-netns\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278576 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-host-var-lib-cni-multus\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278591 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-host-run-multus-certs\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278604 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-etc-openvswitch\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278626 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/215170a8-84a9-4e15-9b0e-c1200c680f30-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-pgwvb\" (UID: \"215170a8-84a9-4e15-9b0e-c1200c680f30\") " pod="openshift-multus/multus-additional-cni-plugins-pgwvb"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278642 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-run-openvswitch\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278657 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-cnibin\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278674 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-run-systemd\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278689 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-etc-kubernetes\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278705 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278720 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-multus-socket-dir-parent\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278740 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-run-ovn-kubernetes\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278754 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-host-run-netns\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278772 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-systemd-units\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278792 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-log-socket\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278812 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-ovn-node-metrics-cert\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278843 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-ovnkube-script-lib\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278858 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-host-run-k8s-cni-cncf-io\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278874 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-env-overrides\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278887 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-slash\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278901 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-ovnkube-config\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278923 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/215170a8-84a9-4e15-9b0e-c1200c680f30-cni-binary-copy\") pod \"multus-additional-cni-plugins-pgwvb\" (UID: \"215170a8-84a9-4e15-9b0e-c1200c680f30\") " pod="openshift-multus/multus-additional-cni-plugins-pgwvb"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.278979 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-kubelet\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.279268 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-system-cni-dir\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.279450 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/215170a8-84a9-4e15-9b0e-c1200c680f30-os-release\") pod \"multus-additional-cni-plugins-pgwvb\" (UID: \"215170a8-84a9-4e15-9b0e-c1200c680f30\") " pod="openshift-multus/multus-additional-cni-plugins-pgwvb"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.279480 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-cni-netd\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.279523 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/215170a8-84a9-4e15-9b0e-c1200c680f30-cni-binary-copy\") pod \"multus-additional-cni-plugins-pgwvb\" (UID: \"215170a8-84a9-4e15-9b0e-c1200c680f30\") " pod="openshift-multus/multus-additional-cni-plugins-pgwvb"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.279543 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-node-log\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.279525 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-multus-conf-dir\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.279583 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-run-netns\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.279568 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-host-var-lib-kubelet\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.279585 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-multus-daemon-config\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.279609 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-host-var-lib-cni-bin\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.279610 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-host-var-lib-cni-multus\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.279634 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-var-lib-openvswitch\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.279644 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-etc-openvswitch\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.279628 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-host-run-multus-certs\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.280103 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/215170a8-84a9-4e15-9b0e-c1200c680f30-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-pgwvb\" (UID: \"215170a8-84a9-4e15-9b0e-c1200c680f30\") " pod="openshift-multus/multus-additional-cni-plugins-pgwvb"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.280491 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-ovnkube-script-lib\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.280615 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-ovnkube-config\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.280658 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-multus-socket-dir-parent\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.280669 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-run-systemd\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.280713 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-cnibin\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.280707 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-slash\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.280731 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-systemd-units\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.280734 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/215170a8-84a9-4e15-9b0e-c1200c680f30-system-cni-dir\") pod \"multus-additional-cni-plugins-pgwvb\" (UID: \"215170a8-84a9-4e15-9b0e-c1200c680f30\") " pod="openshift-multus/multus-additional-cni-plugins-pgwvb"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.280758 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-run-ovn-kubernetes\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.280759 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.280760 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-cni-bin\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.280781 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-host-run-netns\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.280805 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/215170a8-84a9-4e15-9b0e-c1200c680f30-cnibin\") pod \"multus-additional-cni-plugins-pgwvb\" (UID: \"215170a8-84a9-4e15-9b0e-c1200c680f30\") " pod="openshift-multus/multus-additional-cni-plugins-pgwvb"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.280805 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-run-ovn\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.280847 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-etc-kubernetes\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.280847 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-log-socket\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.280854 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-multus-cni-dir\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.280866 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-host-run-k8s-cni-cncf-io\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.280881 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-os-release\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.280885 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-run-openvswitch\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.280998 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-env-overrides\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.281423 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-cni-binary-copy\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.281464 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/215170a8-84a9-4e15-9b0e-c1200c680f30-tuning-conf-dir\") pod \"multus-additional-cni-plugins-pgwvb\" (UID: \"215170a8-84a9-4e15-9b0e-c1200c680f30\") " pod="openshift-multus/multus-additional-cni-plugins-pgwvb"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.286252 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-ovn-node-metrics-cert\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.291081 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.306111 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9tctk\" (UniqueName: \"kubernetes.io/projected/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-kube-api-access-9tctk\") pod \"ovnkube-node-6zt9s\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.307554 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c96nx\" (UniqueName: \"kubernetes.io/projected/fbfdd781-994b-49b4-9c8e-edc0ea4145d1-kube-api-access-c96nx\") pod \"multus-wz4hw\" (UID: \"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\") " pod="openshift-multus/multus-wz4hw"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.316442 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52sdp\" (UniqueName: \"kubernetes.io/projected/215170a8-84a9-4e15-9b0e-c1200c680f30-kube-api-access-52sdp\") pod \"multus-additional-cni-plugins-pgwvb\" (UID: \"215170a8-84a9-4e15-9b0e-c1200c680f30\") " pod="openshift-multus/multus-additional-cni-plugins-pgwvb"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.347371 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.373091 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-wz4hw"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.379626 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.380321 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.380480 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.380525 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 11 04:51:40 crc kubenswrapper[4651]: E1011 04:51:40.380604 4651 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 11 04:51:40 crc kubenswrapper[4651]: E1011 04:51:40.380641 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-11 04:51:41.380629884 +0000 UTC m=+22.276862680 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 11 04:51:40 crc kubenswrapper[4651]: E1011 04:51:40.380927 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:51:41.38088876 +0000 UTC m=+22.277121556 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 04:51:40 crc kubenswrapper[4651]: E1011 04:51:40.380963 4651 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Oct 11 04:51:40 crc kubenswrapper[4651]: E1011 04:51:40.380989 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-11 04:51:41.380982452 +0000 UTC m=+22.277215248 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.387923 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-pgwvb"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.387911 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
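[Editor's note] The nestedpendingoperations errors above show kubelet's per-volume retry gate: after a failed SetUp/TearDown, no retry is permitted until a deadline ("No retries permitted until ... (durationBeforeRetry 1s)"). A minimal sketch of such a gate follows; the initial 1s delay is taken from the log, but the doubling-with-cap growth is an assumption for illustration, not kubelet's exact policy.

```go
// Sketch: a retry gate that records a deadline on each failure and
// refuses retries before it, like nestedpendingoperations.go does.
package main

import (
	"fmt"
	"time"
)

type backoff struct {
	next     time.Time     // no retries permitted until this instant
	duration time.Duration // durationBeforeRetry applied on the last failure
}

func (b *backoff) fail(now time.Time) {
	switch {
	case b.duration == 0:
		b.duration = time.Second // initial durationBeforeRetry, as in the log
	case b.duration < 2*time.Minute:
		b.duration *= 2 // assumed growth and cap, for illustration only
	}
	b.next = now.Add(b.duration)
}

func (b *backoff) allowed(now time.Time) bool { return !now.Before(b.next) }

func main() {
	var b backoff
	now := time.Now()
	b.fail(now)
	fmt.Println("retry allowed immediately?", b.allowed(now))                // false
	fmt.Println("retry allowed after 1s?", b.allowed(now.Add(time.Second))) // true
}
```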
Oct 11 04:51:40 crc kubenswrapper[4651]: W1011 04:51:40.394346 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod28e01c08_a461_4f44_a49c_4bf92fd3a2ce.slice/crio-65db520198da135ed934537a30d0613d7c412e2bf9d161335159f0654ffcb29a WatchSource:0}: Error finding container 65db520198da135ed934537a30d0613d7c412e2bf9d161335159f0654ffcb29a: Status 404 returned error can't find the container with id 65db520198da135ed934537a30d0613d7c412e2bf9d161335159f0654ffcb29a
Oct 11 04:51:40 crc kubenswrapper[4651]: W1011 04:51:40.396921 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfbfdd781_994b_49b4_9c8e_edc0ea4145d1.slice/crio-549d35ec13efe0333a37f1ed010ef4aac9a43f1ad78d6f5cd7d37d67b373cefa WatchSource:0}: Error finding container 549d35ec13efe0333a37f1ed010ef4aac9a43f1ad78d6f5cd7d37d67b373cefa: Status 404 returned error can't find the container with id 549d35ec13efe0333a37f1ed010ef4aac9a43f1ad78d6f5cd7d37d67b373cefa
Oct 11 04:51:40 crc kubenswrapper[4651]: W1011 04:51:40.414349 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod215170a8_84a9_4e15_9b0e_c1200c680f30.slice/crio-b45ed4f0d1aa6124c96ceb4e8ca28b8597a33c8049a3ccde9b0ffd527409935a WatchSource:0}: Error finding container b45ed4f0d1aa6124c96ceb4e8ca28b8597a33c8049a3ccde9b0ffd527409935a: Status 404 returned error can't find the container with id b45ed4f0d1aa6124c96ceb4e8ca28b8597a33c8049a3ccde9b0ffd527409935a
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.428714 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.465010 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.480943 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.480979 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 11 04:51:40 crc kubenswrapper[4651]: E1011 04:51:40.481095 4651 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 11 04:51:40 crc kubenswrapper[4651]: E1011 04:51:40.481114 4651 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 11 04:51:40 crc kubenswrapper[4651]: E1011 04:51:40.481126 4651 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 11 04:51:40 crc kubenswrapper[4651]: E1011 04:51:40.481165 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-11 04:51:41.481153758 +0000 UTC m=+22.377386544 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 11 04:51:40 crc kubenswrapper[4651]: E1011 04:51:40.481442 4651 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 11 04:51:40 crc kubenswrapper[4651]: E1011 04:51:40.481483 4651 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 11 04:51:40 crc kubenswrapper[4651]: E1011 04:51:40.481498 4651 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 11 04:51:40 crc kubenswrapper[4651]: E1011 04:51:40.481560 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-11 04:51:41.481542348 +0000 UTC m=+22.377775224 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
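[Editor's note] The projected.go errors above illustrate the all-or-nothing nature of a projected (kube-api-access-*) volume: every source (here the kube-root-ca.crt and openshift-service-ca.crt configmaps) must resolve, and each missing object contributes one error to the aggregate message. A minimal stdlib-Go sketch of that aggregation follows; the object names are copied from the log, while the resolver function itself is hypothetical.

```go
// Sketch: a projected volume fails if any source fails, and the
// per-source errors are joined into one message, as in projected.go:194.
package main

import (
	"errors"
	"fmt"
)

// resolve stands in for the kubelet's object-cache lookup; here every
// configmap is still "not registered", as during early node startup.
func resolve(namespace, name string) error {
	return fmt.Errorf("object %q/%q not registered", namespace, name)
}

// projectSources aggregates one error per failing source.
func projectSources(namespace string, sources []string) error {
	var errs []error
	for _, s := range sources {
		if err := resolve(namespace, s); err != nil {
			errs = append(errs, err)
		}
	}
	return errors.Join(errs...)
}

func main() {
	err := projectSources("openshift-network-diagnostics",
		[]string{"kube-root-ca.crt", "openshift-service-ca.crt"})
	fmt.Println(err)
}
```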
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.509097 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.630119 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.641006 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.641897 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"]
Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.655222 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted.
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.665403 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.687601 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 
04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.744114 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.766372 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.807649 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.848569 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.869408 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:51:40 crc kubenswrapper[4651]: E1011 04:51:40.869864 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.887736 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.926871 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:40 crc kubenswrapper[4651]: I1011 04:51:40.967201 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.004355 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" event={"ID":"215170a8-84a9-4e15-9b0e-c1200c680f30","Type":"ContainerStarted","Data":"1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506"} Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.004615 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" event={"ID":"215170a8-84a9-4e15-9b0e-c1200c680f30","Type":"ContainerStarted","Data":"b45ed4f0d1aa6124c96ceb4e8ca28b8597a33c8049a3ccde9b0ffd527409935a"} Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.006361 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" event={"ID":"519a1ae1-e964-48b0-8b61-835146df28c1","Type":"ContainerStarted","Data":"3f12e36a570de649e1df8107ea828cd8e65c18e111de191d3796fc1f3134e43c"} Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.006425 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" event={"ID":"519a1ae1-e964-48b0-8b61-835146df28c1","Type":"ContainerStarted","Data":"583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9"} Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.006440 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" event={"ID":"519a1ae1-e964-48b0-8b61-835146df28c1","Type":"ContainerStarted","Data":"a8c7a9a5b7b48e107d162527871c447423f720a1b9ce144ef1e5460b68107f4f"} Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.007605 4651 generic.go:334] "Generic (PLEG): container finished" podID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerID="ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020" exitCode=0 Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.007652 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" event={"ID":"28e01c08-a461-4f44-a49c-4bf92fd3a2ce","Type":"ContainerDied","Data":"ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020"} 
Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.007669 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" event={"ID":"28e01c08-a461-4f44-a49c-4bf92fd3a2ce","Type":"ContainerStarted","Data":"65db520198da135ed934537a30d0613d7c412e2bf9d161335159f0654ffcb29a"} Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.011101 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"6e5b8526fed77f2e541d6793dc36a902daebce60d367f28f9efb45f7fabb44fc"} Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.011128 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"e278c9a5d14f026733737226a6ffc91202b760e48622d5d0031678981fa5f857"} Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.011139 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"9fa75de0e8e2fdb4d06e23af8e6490e198cc927eed7f684bccebff223b29b0af"} Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.013115 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"792c74de8e44c58a861ce6d23b82a63e31b5f7b2357082117fee0ab4cb224fc0"} Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.014698 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"d425a775a5e278ee77231764e58cac3441e04c383eee54f69ba84488ca640eae"} Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.014807 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"31c08851de25529aa13935b430e40df1f8c5951d689031cd079f0e8a12eff696"} Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.015839 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wz4hw" event={"ID":"fbfdd781-994b-49b4-9c8e-edc0ea4145d1","Type":"ContainerStarted","Data":"2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2"} Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.015881 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wz4hw" event={"ID":"fbfdd781-994b-49b4-9c8e-edc0ea4145d1","Type":"ContainerStarted","Data":"549d35ec13efe0333a37f1ed010ef4aac9a43f1ad78d6f5cd7d37d67b373cefa"} Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.016990 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-phsgk" event={"ID":"9e8fb74d-013d-4103-b029-e8416d079dcf","Type":"ContainerStarted","Data":"b7407b7da9820bbcdac4ce9c49c4520834a466b0db187c5f861972713ba1583f"} Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.017039 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-phsgk" 
event={"ID":"9e8fb74d-013d-4103-b029-e8416d079dcf","Type":"ContainerStarted","Data":"39e932265fc80fa2ec0717b116e07b27b39be35263635dbb4f3411176075ebc7"} Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.017098 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshif
t-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log
/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursive
ReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.018841 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-kwhmr" event={"ID":"dbcac3cb-b774-47ec-a86c-b22191d14d99","Type":"ContainerStarted","Data":"52a0f9e11cf4e8c69d5b356b702ce9df8cbc935daadadf531004a23e5f6dad65"} Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.018868 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-kwhmr" event={"ID":"dbcac3cb-b774-47ec-a86c-b22191d14d99","Type":"ContainerStarted","Data":"c504c985ff1c714d9aeab178e4461544e133264680dd5676e7291a08a4a36fa4"} Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.048423 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.090562 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":
\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:41Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.131221 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:41Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.168110 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52a0f9e11cf4e8c69d5b356b702ce9df8cbc935daadadf531004a23e5f6dad65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-11T04:51:41Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.213081 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\
"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\"
,\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474
c1938d19e6cbfec9c2174fe10e020\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:41Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.249380 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d425a775a5e278ee77231764e58cac3441e04c383eee54f69ba84488ca640eae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:41Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.288531 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:41Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.329339 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e5b8526fed77f2e541d6793dc36a902daebce60d367f28f9efb45f7fabb44fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e278c9a5d14f026733737226a6ffc91202b760e48622d5d0031678981fa5f857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:41Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.372542 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:41Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.387968 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.388099 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:51:41 crc kubenswrapper[4651]: E1011 04:51:41.388132 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-10-11 04:51:43.388102026 +0000 UTC m=+24.284334832 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:51:41 crc kubenswrapper[4651]: E1011 04:51:41.388191 4651 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.388192 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:51:41 crc kubenswrapper[4651]: E1011 04:51:41.388238 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-11 04:51:43.388225589 +0000 UTC m=+24.284458385 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 11 04:51:41 crc kubenswrapper[4651]: E1011 04:51:41.388345 4651 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 11 04:51:41 crc kubenswrapper[4651]: E1011 04:51:41.388444 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-11 04:51:43.388426334 +0000 UTC m=+24.284659130 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.408890 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f12e36a570de649e1df8107ea828cd8e65c18e111de191d3796fc1f3134e43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTi
me\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:41Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.450091 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\"
:\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:41Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.488997 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.489059 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:51:41 crc kubenswrapper[4651]: E1011 04:51:41.489154 4651 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 11 04:51:41 crc kubenswrapper[4651]: E1011 04:51:41.489155 4651 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 11 04:51:41 crc kubenswrapper[4651]: E1011 04:51:41.489185 4651 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 11 04:51:41 crc kubenswrapper[4651]: E1011 04:51:41.489198 4651 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 04:51:41 crc kubenswrapper[4651]: E1011 04:51:41.489250 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-11 04:51:43.489232935 +0000 UTC m=+24.385465731 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 04:51:41 crc kubenswrapper[4651]: E1011 04:51:41.489170 4651 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 11 04:51:41 crc kubenswrapper[4651]: E1011 04:51:41.489279 4651 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 04:51:41 crc kubenswrapper[4651]: E1011 04:51:41.489319 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-11 04:51:43.489308847 +0000 UTC m=+24.385541643 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.489977 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 
04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:41Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.531493 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"744aa461-2868-440d-a1b9-6d30c0c50b56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aabba06dd47ad528f830a47cc79ddabb3789d24ebed62a6e8f976df1b156ef1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb996f17546fc77ac1b8ed27428aaa6060822daa0156cf016dbb24ed278403a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ee2612930be6596c0186cc000d80039b52e225fd64102002ed9316a0605521\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506
ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6263dc47a8284e9849da1fb15c4788174252f3637665ac6e4b19298ca90c587\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:41Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.567425 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7407b7da9820bbcdac4ce9c49c4520834a466b0db187c5f861972713ba1583f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:41Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.609003 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:41Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.869220 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:51:41 crc kubenswrapper[4651]: E1011 04:51:41.869623 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.869316 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:51:41 crc kubenswrapper[4651]: E1011 04:51:41.870017 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:51:41 crc kubenswrapper[4651]: I1011 04:51:41.882659 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Oct 11 04:51:42 crc kubenswrapper[4651]: I1011 04:51:42.023804 4651 generic.go:334] "Generic (PLEG): container finished" podID="215170a8-84a9-4e15-9b0e-c1200c680f30" containerID="1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506" exitCode=0 Oct 11 04:51:42 crc kubenswrapper[4651]: I1011 04:51:42.023879 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" event={"ID":"215170a8-84a9-4e15-9b0e-c1200c680f30","Type":"ContainerDied","Data":"1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506"} Oct 11 04:51:42 crc kubenswrapper[4651]: I1011 04:51:42.026899 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" event={"ID":"28e01c08-a461-4f44-a49c-4bf92fd3a2ce","Type":"ContainerStarted","Data":"3eed8335754437c935f07223bf4c4f40f9ab0bbdc9a86b7610f7f6fd55140e37"} Oct 11 04:51:42 crc kubenswrapper[4651]: I1011 04:51:42.026948 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" event={"ID":"28e01c08-a461-4f44-a49c-4bf92fd3a2ce","Type":"ContainerStarted","Data":"788d6a46d740de869fe7cd1d28479f8115a3515c7bd2482e06cbc4dba7b27fac"} Oct 11 04:51:42 crc kubenswrapper[4651]: I1011 04:51:42.026968 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" event={"ID":"28e01c08-a461-4f44-a49c-4bf92fd3a2ce","Type":"ContainerStarted","Data":"0fd0aaae2760a1c934aed54b8e07528f0f78db1149c9e1f8674bec90b61a7078"} Oct 11 04:51:42 crc kubenswrapper[4651]: I1011 04:51:42.026985 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" event={"ID":"28e01c08-a461-4f44-a49c-4bf92fd3a2ce","Type":"ContainerStarted","Data":"23b3fde15e637081cbf5fad2e55ccea1f0fe63d39ff5f82a60ce05ffeef9a837"} Oct 11 04:51:42 crc kubenswrapper[4651]: I1011 04:51:42.027002 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" event={"ID":"28e01c08-a461-4f44-a49c-4bf92fd3a2ce","Type":"ContainerStarted","Data":"99f927067aabe4690e99fe50a069afdf68b22d950a8ae141235571ced468c44e"} Oct 11 04:51:42 crc kubenswrapper[4651]: I1011 04:51:42.046317 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d425a775a5e278ee77231764e58cac3441e04c383eee54f69ba84488ca640eae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:42Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:42 crc kubenswrapper[4651]: I1011 04:51:42.075394 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:42Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:42 crc kubenswrapper[4651]: I1011 04:51:42.088523 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e5b8526fed77f2e541d6793dc36a902daebce60d367f28f9efb45f7fabb44fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e278c9a5d14f026733737226a6ffc91202b760e48622d5d0031678981fa5f857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMount
s\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:42Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:42 crc kubenswrapper[4651]: I1011 04:51:42.099843 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\
\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:42Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:42 crc kubenswrapper[4651]: I1011 04:51:42.119096 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 
04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:42Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:42 crc kubenswrapper[4651]: I1011 04:51:42.134974 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"744aa461-2868-440d-a1b9-6d30c0c50b56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aabba06dd47ad528f830a47cc79ddabb3789d24ebed62a6e8f976df1b156ef1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb996f17546fc77ac1b8ed27428aaa6060822daa0156cf016dbb24ed278403a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ee2612930be6596c0186cc000d80039b52e225fd64102002ed9316a0605521\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506
ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6263dc47a8284e9849da1fb15c4788174252f3637665ac6e4b19298ca90c587\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:42Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:42 crc kubenswrapper[4651]: I1011 04:51:42.149519 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:42Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:42 crc kubenswrapper[4651]: I1011 04:51:42.162485 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f12e36a570de649e1df8107ea828cd8e65c18e111de191d3796fc1f3134e43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:42Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:42 crc kubenswrapper[4651]: I1011 04:51:42.174431 4651 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7407b7da9820bbcdac4ce9c49c4520834a466b0db187c5f861972713ba1583f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:42Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:42 crc kubenswrapper[4651]: I1011 04:51:42.188761 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:42Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:42 crc kubenswrapper[4651]: I1011 04:51:42.202963 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:42Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:42 crc kubenswrapper[4651]: I1011 04:51:42.215981 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52a0f9e11cf4e8c69d5b356b702ce9df8cbc935daadadf531004a23e5f6dad65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-11T04:51:42Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:42 crc kubenswrapper[4651]: I1011 04:51:42.236468 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\
"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\"
,\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474
c1938d19e6cbfec9c2174fe10e020\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:42Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:42 crc kubenswrapper[4651]: I1011 04:51:42.266608 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:42Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:42 crc 
kubenswrapper[4651]: I1011 04:51:42.868684 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:51:42 crc kubenswrapper[4651]: E1011 04:51:42.869069 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.034415 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" event={"ID":"28e01c08-a461-4f44-a49c-4bf92fd3a2ce","Type":"ContainerStarted","Data":"68954082d4369b9b4d95ba1f15e19593dc330b698562be413055b2ba012b5f8c"} Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.040851 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" event={"ID":"215170a8-84a9-4e15-9b0e-c1200c680f30","Type":"ContainerDied","Data":"1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e"} Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.040851 4651 generic.go:334] "Generic (PLEG): container finished" podID="215170a8-84a9-4e15-9b0e-c1200c680f30" containerID="1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e" exitCode=0 Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.043117 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"55e4c9c2d46f67bb0f8ccdb2de27dc82f0280476613b3b30d587ff6f5784e787"} Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.064105 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"744aa461-2868-440d-a1b9-6d30c0c50b56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aabba06dd47ad528f830a47cc79ddabb3789d24ebed62a6e8f976df1b156ef1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb996f17546fc77ac1b8ed27428aaa6060822daa0156cf016dbb24ed278403a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ee2612930be6596c0186cc000d80039b52e225fd64102002ed9316a0605521\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6263dc47a8284e9849da1fb15c4788174252f3637665ac6e4b19298ca90c587\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.089574 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.104476 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f12e36a570de649e1df8107ea828cd8e65c18e111de191d3796fc1f3134e43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.119531 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":
\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.133835 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 
04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.144463 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7407b7da9820bbcdac4ce9c49c4520834a466b0db187c5f861972713ba1583f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.159145 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.169931 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52a0f9e11cf4e8c69d5b356b702ce9df8cbc935daadadf531004a23e5f6dad65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.188391 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.209453 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-
11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.221530 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.234762 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.250373 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e5b8526fed77f2e541d6793dc36a902daebce60d367f28f9efb45f7fabb44fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e278c9a5d14f026733737226a6ffc91202b760e48622d5d0031678981fa5f857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMount
s\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.262890 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d425a775a5e278ee77231764e58cac3441e04c383eee54f69ba84488ca640eae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.276228 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7407b7da9820bbcdac4ce9c49c4520834a466b0db187c5f861972713ba1583f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.286488 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.296482 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.304839 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52a0f9e11cf4e8c69d5b356b702ce9df8cbc935daadadf531004a23e5f6dad65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.319861 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\
"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\"
,\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474
c1938d19e6cbfec9c2174fe10e020\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.333017 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-
11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.344412 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d425a775a5e278ee77231764e58cac3441e04c383eee54f69ba84488ca640eae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.353373 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55e4c9c2d46f67bb0f8ccdb2de27dc82f0280476613b3b30d587ff6f5784e787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.364244 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e5b8526fed77f2e541d6793dc36a902daebce60d367f28f9efb45f7fabb44fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e278c9a5d14f026733737226a6ffc91202b760e48622d5d0031678981fa5f857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.375956 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.390959 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f12e36a570de649e1df8107ea828cd8e65c18e111de191d3796fc1f3134e43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.403679 4651 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.407222 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.407351 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.407429 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:51:43 crc kubenswrapper[4651]: E1011 04:51:43.407606 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:51:47.407548463 +0000 UTC m=+28.303781299 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:51:43 crc kubenswrapper[4651]: E1011 04:51:43.407691 4651 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 11 04:51:43 crc kubenswrapper[4651]: E1011 04:51:43.407799 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-11 04:51:47.407782209 +0000 UTC m=+28.304015005 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 11 04:51:43 crc kubenswrapper[4651]: E1011 04:51:43.407717 4651 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 11 04:51:43 crc kubenswrapper[4651]: E1011 04:51:43.408013 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-11 04:51:47.407989064 +0000 UTC m=+28.304221900 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.418935 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 
04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.436226 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"744aa461-2868-440d-a1b9-6d30c0c50b56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aabba06dd47ad528f830a47cc79ddabb3789d24ebed62a6e8f976df1b156ef1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb996f17546fc77ac1b8ed27428aaa6060822daa0156cf016dbb24ed278403a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ee2612930be6596c0186cc000d80039b52e225fd64102002ed9316a0605521\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506
ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6263dc47a8284e9849da1fb15c4788174252f3637665ac6e4b19298ca90c587\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.508919 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.509012 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:51:43 crc kubenswrapper[4651]: E1011 04:51:43.509143 4651 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 11 04:51:43 crc kubenswrapper[4651]: E1011 04:51:43.509182 4651 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 11 04:51:43 
crc kubenswrapper[4651]: E1011 04:51:43.509203 4651 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 04:51:43 crc kubenswrapper[4651]: E1011 04:51:43.509271 4651 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 11 04:51:43 crc kubenswrapper[4651]: E1011 04:51:43.509305 4651 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 11 04:51:43 crc kubenswrapper[4651]: E1011 04:51:43.509331 4651 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 04:51:43 crc kubenswrapper[4651]: E1011 04:51:43.509275 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-11 04:51:47.509253167 +0000 UTC m=+28.405486003 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 04:51:43 crc kubenswrapper[4651]: E1011 04:51:43.509426 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-11 04:51:47.509398151 +0000 UTC m=+28.405630997 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.528377 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.535180 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.550376 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 
04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.569765 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"744aa461-2868-440d-a1b9-6d30c0c50b56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aabba06dd47ad528f830a47cc79ddabb3789d24ebed62a6e8f976df1b156ef1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb996f17546fc77ac1b8ed27428aaa6060822daa0156cf016dbb24ed278403a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ee2612930be6596c0186cc000d80039b52e225fd64102002ed9316a0605521\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506c
e0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6263dc47a8284e9849da1fb15c4788174252f3637665ac6e4b19298ca90c587\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.586962 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.600844 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f12e36a570de649e1df8107ea828cd8e65c18e111de191d3796fc1f3134e43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.615285 4651 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.626631 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7407b7da9820bbcdac4ce9c49c4520834a466b0db187c5f861972713ba1583f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.641617 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.659325 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.668887 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52a0f9e11cf4e8c69d5b356b702ce9df8cbc935daadadf531004a23e5f6dad65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.698240 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z 
is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.732724 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/ser
viceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\
"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.771066 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d425a775a5e278ee77231764e58cac3441e04c383eee54f69ba84488ca640eae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.812601 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55e4c9c2d46f67bb0f8ccdb2de27dc82f0280476613b3b30d587ff6f5784e787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.852772 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e5b8526fed77f2e541d6793dc36a902daebce60d367f28f9efb45f7fabb44fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e278c9a5d14f026733737226a6ffc91202b760e48622d5d0031678981fa5f857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.868631 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:51:43 crc kubenswrapper[4651]: E1011 04:51:43.868751 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.868644 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:51:43 crc kubenswrapper[4651]: E1011 04:51:43.868876 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.890746 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d425a775a5e278ee77231764e58cac3441e04c383eee54f69ba84488ca640eae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.930222 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55e4c9c2d46f67bb0f8ccdb2de27dc82f0280476613b3b30d587ff6f5784e787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:43 crc kubenswrapper[4651]: I1011 04:51:43.969062 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e5b8526fed77f2e541d6793dc36a902daebce60d367f28f9efb45f7fabb44fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e278c9a5d14f026733737226a6ffc91202b760e48622d5d0031678981fa5f857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:43Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:44 crc kubenswrapper[4651]: I1011 04:51:44.011526 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:44Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:44 crc kubenswrapper[4651]: I1011 04:51:44.017185 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 11 04:51:44 crc kubenswrapper[4651]: I1011 04:51:44.050077 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f12e36a570de649e1df8107ea828cd8e65c18e111de191d3796fc1f3134e43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:44Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:44 crc kubenswrapper[4651]: I1011 04:51:44.051108 4651 generic.go:334] "Generic (PLEG): 
container finished" podID="215170a8-84a9-4e15-9b0e-c1200c680f30" containerID="626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62" exitCode=0 Oct 11 04:51:44 crc kubenswrapper[4651]: I1011 04:51:44.051203 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" event={"ID":"215170a8-84a9-4e15-9b0e-c1200c680f30","Type":"ContainerDied","Data":"626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62"} Oct 11 04:51:44 crc kubenswrapper[4651]: I1011 04:51:44.092599 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\
"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:44Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:44 crc kubenswrapper[4651]: I1011 04:51:44.131911 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 
04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:44Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:44 crc kubenswrapper[4651]: I1011 04:51:44.171376 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"744aa461-2868-440d-a1b9-6d30c0c50b56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aabba06dd47ad528f830a47cc79ddabb3789d24ebed62a6e8f976df1b156ef1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb996f17546fc77ac1b8ed27428aaa6060822daa0156cf016dbb24ed278403a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ee2612930be6596c0186cc000d80039b52e225fd64102002ed9316a0605521\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"
name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6263dc47a8284e9849da1fb15c4788174252f3637665ac6e4b19298ca90c587\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:44Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:44 crc kubenswrapper[4651]: I1011 04:51:44.249087 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7407b7da9820bbcdac4ce9c49c4520834a466b0db187c5f861972713ba1583f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:44Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:44 crc kubenswrapper[4651]: I1011 04:51:44.264672 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:44Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:44 crc kubenswrapper[4651]: I1011 04:51:44.288924 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:44Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:44 crc kubenswrapper[4651]: I1011 04:51:44.329307 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52a0f9e11cf4e8c69d5b356b702ce9df8cbc935daadadf531004a23e5f6dad65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-11T04:51:44Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:44 crc kubenswrapper[4651]: I1011 04:51:44.377023 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\
"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\"
,\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474
c1938d19e6cbfec9c2174fe10e020\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:44Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:44 crc kubenswrapper[4651]: I1011 04:51:44.415794 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-
11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:44Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:44 crc kubenswrapper[4651]: I1011 04:51:44.450229 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d425a775a5e278ee77231764e58cac3441e04c383eee54f69ba84488ca640eae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:44Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:44 crc kubenswrapper[4651]: I1011 04:51:44.489384 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55e4c9c2d46f67bb0f8ccdb2de27dc82f0280476613b3b30d587ff6f5784e787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:44Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:44 crc kubenswrapper[4651]: I1011 04:51:44.530978 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e5b8526fed77f2e541d6793dc36a902daebce60d367f28f9efb45f7fabb44fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e278c9a5d14f026733737226a6ffc91202b760e48622d5d0031678981fa5f857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:44Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:44 crc kubenswrapper[4651]: I1011 04:51:44.573246 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:44Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:44 crc kubenswrapper[4651]: I1011 04:51:44.617236 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:44Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:44 crc kubenswrapper[4651]: I1011 04:51:44.655393 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"744aa461-2868-440d-a1b9-6d30c0c50b56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aabba06dd47ad528f830a47cc79ddabb3789d24ebed62a6e8f976df1b156ef1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb996f17546fc77ac1b8ed27428aaa6060822daa0156cf016dbb24ed278403a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ee2612930be6596c0186cc000d80039b52e225fd64102002ed9316a0605521\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6263dc47a8284e9849da1fb15c4788174252f3637665ac6e4b19298ca90c587\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:44Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:44 crc kubenswrapper[4651]: I1011 04:51:44.695623 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:44Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:44 crc kubenswrapper[4651]: I1011 04:51:44.735314 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f12e36a570de649e1df8107ea828cd8e65c18e111de191d3796fc1f3134e43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:44Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:44 crc kubenswrapper[4651]: I1011 04:51:44.771588 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7407b7da9820bbcdac4ce9c49c4520834a466b0db187c5f861972713ba1583f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:44Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:44 crc kubenswrapper[4651]: I1011 04:51:44.816674 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:44Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:44 crc kubenswrapper[4651]: I1011 04:51:44.858627 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:44Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:44 crc kubenswrapper[4651]: I1011 04:51:44.868802 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:51:44 crc kubenswrapper[4651]: E1011 04:51:44.868934 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:51:44 crc kubenswrapper[4651]: I1011 04:51:44.889988 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52a0f9e11cf4e8c69d5b356b702ce9df8cbc935daadadf531004a23e5f6dad65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:44Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:44 crc kubenswrapper[4651]: I1011 04:51:44.937260 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:44Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:44 crc kubenswrapper[4651]: I1011 04:51:44.972622 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:44Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.058942 4651 generic.go:334] "Generic (PLEG): container finished" podID="215170a8-84a9-4e15-9b0e-c1200c680f30" containerID="c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175" exitCode=0 Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.059051 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" event={"ID":"215170a8-84a9-4e15-9b0e-c1200c680f30","Type":"ContainerDied","Data":"c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175"} Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.066539 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" event={"ID":"28e01c08-a461-4f44-a49c-4bf92fd3a2ce","Type":"ContainerStarted","Data":"add3ed68e0bf59bc569248a3660cfc9a20995641ec65b0bf7133e47c366aad73"} Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.080926 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7407b7da9820bbcdac4ce9c49c4520834a466b0db187c5f861972713ba1583f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:45Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.104930 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:45Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.117331 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:45Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.128815 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52a0f9e11cf4e8c69d5b356b702ce9df8cbc935daadadf531004a23e5f6dad65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-11T04:51:45Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.174977 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\
"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\"
,\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474
c1938d19e6cbfec9c2174fe10e020\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:45Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.212096 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:45Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.253002 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d425a775a5e278ee77231764e58cac3441e04c383eee54f69ba84488ca640eae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-11T04:51:45Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.291297 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55e4c9c2d46f67bb0f8ccdb2de27dc82f0280476613b3b30d587ff6f5784e787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:45Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.329413 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e5b8526fed77f2e541d6793dc36a902daebce60d367f28f9efb45f7fabb44fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e278c9a5d14f026733737226a6ffc91202b760e48622d5d0031678981fa5f857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:45Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.368924 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:45Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.407876 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f12e36a570de649e1df8107ea828cd8e65c18e111de191d3796fc1f3134e43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:45Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.451025 4651 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:45Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.491619 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-api
server-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:45Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.532175 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"744aa461-2868-440d-a1b9-6d30c0c50b56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aabba06dd47ad528f830a47cc79ddabb3789d24ebed62a6e8f976df1b156ef1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb996f17546fc77ac1b8ed27428aaa6060822daa0156cf016dbb24ed278403a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ee2612930be6596c0186cc000d80039b52e225fd64102002ed9316a0605521\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6263dc47a8284e9849da1fb15c4788174252f3637665ac6e4b19298ca90c587\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:45Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.541701 4651 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.543924 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.543993 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.544016 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.544154 4651 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.562054 4651 kubelet_node_status.go:115] "Node was previously registered" node="crc" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.562451 4651 kubelet_node_status.go:79] "Successfully registered node" node="crc" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.564445 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.564506 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.564532 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.564564 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.564589 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:45Z","lastTransitionTime":"2025-10-11T04:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI 
configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:45 crc kubenswrapper[4651]: E1011 04:51:45.578073 4651 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a821a5c3-63e0-43db-82e0-e9c6e98ead52\\\",\\\"systemUUID\\\":\\\"f1c4ea71-0c28-43a7-99a4-e27ff72e186a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:45Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.583346 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.583382 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.583391 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.583409 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.583421 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:45Z","lastTransitionTime":"2025-10-11T04:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:45 crc kubenswrapper[4651]: E1011 04:51:45.599966 4651 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a821a5c3-63e0-43db-82e0-e9c6e98ead52\\\",\\\"systemUUID\\\":\\\"f1c4ea71-0c28-43a7-99a4-e27ff72e186a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:45Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.605313 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.605356 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.605365 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.605381 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.605393 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:45Z","lastTransitionTime":"2025-10-11T04:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:45 crc kubenswrapper[4651]: E1011 04:51:45.629036 4651 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a821a5c3-63e0-43db-82e0-e9c6e98ead52\\\",\\\"systemUUID\\\":\\\"f1c4ea71-0c28-43a7-99a4-e27ff72e186a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:45Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.633603 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.633678 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.633702 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.633734 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.633758 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:45Z","lastTransitionTime":"2025-10-11T04:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:45 crc kubenswrapper[4651]: E1011 04:51:45.650017 4651 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a821a5c3-63e0-43db-82e0-e9c6e98ead52\\\",\\\"systemUUID\\\":\\\"f1c4ea71-0c28-43a7-99a4-e27ff72e186a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:45Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.654529 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.654599 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.654619 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.654647 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.654664 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:45Z","lastTransitionTime":"2025-10-11T04:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:45 crc kubenswrapper[4651]: E1011 04:51:45.677342 4651 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a821a5c3-63e0-43db-82e0-e9c6e98ead52\\\",\\\"systemUUID\\\":\\\"f1c4ea71-0c28-43a7-99a4-e27ff72e186a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:45Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:45 crc kubenswrapper[4651]: E1011 04:51:45.677581 4651 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.679662 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.679717 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.679732 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.679754 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.679770 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:45Z","lastTransitionTime":"2025-10-11T04:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.782347 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.782388 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.782407 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.782424 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.782435 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:45Z","lastTransitionTime":"2025-10-11T04:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.869373 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:51:45 crc kubenswrapper[4651]: E1011 04:51:45.869558 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.869637 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:51:45 crc kubenswrapper[4651]: E1011 04:51:45.869716 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.884251 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.884282 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.884290 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.884303 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.884311 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:45Z","lastTransitionTime":"2025-10-11T04:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.988014 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.988072 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.988090 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.988115 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:45 crc kubenswrapper[4651]: I1011 04:51:45.988133 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:45Z","lastTransitionTime":"2025-10-11T04:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.083022 4651 generic.go:334] "Generic (PLEG): container finished" podID="215170a8-84a9-4e15-9b0e-c1200c680f30" containerID="27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924" exitCode=0 Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.083091 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" event={"ID":"215170a8-84a9-4e15-9b0e-c1200c680f30","Type":"ContainerDied","Data":"27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924"} Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.090415 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.090465 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.090481 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.090504 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.090520 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:46Z","lastTransitionTime":"2025-10-11T04:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.109254 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:46Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.129541 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7407b7da9820bbcdac4ce9c49c4520834a466b0db187c5f861972713ba1583f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:46Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.162157 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name
\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:46Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.183521 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:46Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.193744 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.193816 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.193866 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.193902 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.193921 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:46Z","lastTransitionTime":"2025-10-11T04:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.202814 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:46Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.218360 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52a0f9e11cf4e8c69d5b356b702ce9df8cbc935daadadf531004a23e5f6dad65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:46Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.238688 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e5b8526fed77f2e541d6793dc36a902daebce60d367f28f9efb45f7fabb44fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e278c9a5d14f026733737226a6ffc91202b760e48622d5d0031678981fa5f857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:46Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.254604 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d425a775a5e278ee77231764e58cac3441e04c383eee54f69ba84488ca640eae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:46Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.268414 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55e4c9c2d46f67bb0f8ccdb2de27dc82f0280476613b3b30d587ff6f5784e787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:46Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.287027 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:46Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.296889 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.296922 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.296930 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.296947 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.296958 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:46Z","lastTransitionTime":"2025-10-11T04:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.303535 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f12e36a570de649e1df8107ea828cd8e65c18e111de191d3796fc1f3134e43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:46Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.323209 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:46Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.343207 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@s
ha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:46Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.361588 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"744aa461-2868-440d-a1b9-6d30c0c50b56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aabba06dd47ad528f830a47cc79ddabb3789d24ebed62a6e8f976df1b156ef1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb996f17546fc77ac1b8ed27428aaa6060822daa0156cf016dbb24ed278403a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ee2612930be6596c0186cc000d80039b52e225fd64102002ed9316a0605521\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6263dc47a8284e9849da1fb15c4788174252f3637665ac6e4b19298ca90c587\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:46Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.399546 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.399603 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.399620 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.399645 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.399664 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:46Z","lastTransitionTime":"2025-10-11T04:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.504259 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.504350 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.504373 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.504396 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.504453 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:46Z","lastTransitionTime":"2025-10-11T04:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.607210 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.607521 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.607534 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.607552 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.607565 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:46Z","lastTransitionTime":"2025-10-11T04:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.709546 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.709587 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.709600 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.709617 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.709629 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:46Z","lastTransitionTime":"2025-10-11T04:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.812418 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.812458 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.812469 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.812486 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.812498 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:46Z","lastTransitionTime":"2025-10-11T04:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.868547 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:51:46 crc kubenswrapper[4651]: E1011 04:51:46.868694 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.915693 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.915775 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.915796 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.915860 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:46 crc kubenswrapper[4651]: I1011 04:51:46.915879 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:46Z","lastTransitionTime":"2025-10-11T04:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.019175 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.019244 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.019260 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.019631 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.019704 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:47Z","lastTransitionTime":"2025-10-11T04:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.093013 4651 generic.go:334] "Generic (PLEG): container finished" podID="215170a8-84a9-4e15-9b0e-c1200c680f30" containerID="d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a" exitCode=0 Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.093117 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" event={"ID":"215170a8-84a9-4e15-9b0e-c1200c680f30","Type":"ContainerDied","Data":"d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a"} Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.101392 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" event={"ID":"28e01c08-a461-4f44-a49c-4bf92fd3a2ce","Type":"ContainerStarted","Data":"508370f3dcd6abe4dbf8d3bcb4cf18954c12de8d7b30a7c3e5f1145bd9dbbe3d"} Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.101813 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.101933 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.115630 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d425a775a5e278ee77231764e58cac3441e04c383eee54f69ba84488ca640eae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:47Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.122492 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.122558 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.122579 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.122605 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.122625 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:47Z","lastTransitionTime":"2025-10-11T04:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.135294 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.139189 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55e4c9c2d46f67bb0f8ccdb2de27dc82f0280476613b3b30d587ff6f5784e787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:47Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.142410 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.157443 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e5b8526fed77f2e541d6793dc36a902daebce60d367f28f9efb45f7fabb44fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e278c9a5d14f026733737226a6ffc91202b760e48622d5d0031678981fa5f857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:47Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.173302 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:47Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.193399 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f12e36a570de649e1df8107ea828cd8e65c18e111de191d3796fc1f3134e43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:47Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.212920 4651 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:47Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.226157 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.226199 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.226213 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.226229 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.226240 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:47Z","lastTransitionTime":"2025-10-11T04:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.230249 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e
11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:47Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.246591 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"744aa461-2868-440d-a1b9-6d30c0c50b56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aabba06dd47ad528f830a47cc79ddabb3789d24ebed62a6e8f976df1b156ef1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb996f17546fc77ac1b8ed27428aaa6060822daa0156cf016dbb24ed278403a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ee2612930be6596c0186cc000d80039b52e225fd64102002ed9316a0605521\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6263dc47a8284e9849da1fb15c4788174252f3637665ac6e4b19298ca90c587\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:47Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.257074 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7407b7da9820bbcdac4ce9c49c4520834a466b0db187c5f861972713ba1583f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:47Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.269600 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:47Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.282935 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:47Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.292519 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52a0f9e11cf4e8c69d5b356b702ce9df8cbc935daadadf531004a23e5f6dad65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-11T04:51:47Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.311195 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\
"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\"
,\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474
c1938d19e6cbfec9c2174fe10e020\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:47Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.324632 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:47Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.328567 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.328604 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.328616 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.328633 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.328653 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:47Z","lastTransitionTime":"2025-10-11T04:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.338722 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e5b8526fed77f2e541d6793dc36a902daebce60d367f28f9efb45f7fabb44fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e278c9a5d14f026733737226a6ffc91202b760e48622d5d0031678981fa5f857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:47Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.354731 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d425a775a5e278ee77231764e58cac3441e04c383eee54f69ba84488ca640eae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:47Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.368001 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55e4c9c2d46f67bb0f8ccdb2de27dc82f0280476613b3b30d587ff6f5784e787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:47Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.383943 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:47Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.397797 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f12e36a570de649e1df8107ea828cd8e65c18e111de191d3796fc1f3134e43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:47Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.413626 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":
\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:47Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.431452 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.431510 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.431526 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.431547 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.431563 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:47Z","lastTransitionTime":"2025-10-11T04:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.434947 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:47Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.452808 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"744aa461-2868-440d-a1b9-6d30c0c50b56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aabba06dd47ad528f830a47cc79ddabb3789d24ebed62a6e8f976df1b156ef1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb996f17546fc77ac1b8ed27428aaa6060822daa0156cf016dbb24ed278403a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ee2612930be6596c0186cc000d80039b52e225fd64102002ed9316a0605521\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6263dc47a8284e9849da1fb15c4788174252f3637665ac6e4b19298ca90c587\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:47Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.453439 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:51:47 crc kubenswrapper[4651]: E1011 04:51:47.453625 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:51:55.453595459 +0000 UTC m=+36.349828295 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.453808 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.453914 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:51:47 crc kubenswrapper[4651]: E1011 04:51:47.453961 4651 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 11 04:51:47 crc kubenswrapper[4651]: E1011 04:51:47.454030 4651 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 11 04:51:47 crc kubenswrapper[4651]: E1011 04:51:47.454111 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-11 04:51:55.454087842 +0000 UTC m=+36.350320658 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 11 04:51:47 crc kubenswrapper[4651]: E1011 04:51:47.454141 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-11 04:51:55.454127683 +0000 UTC m=+36.350360639 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.469282 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:47Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.482487 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7407b7da9820bbcdac4ce9c49c4520834a466b0db187c5f861972713ba1583f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:47Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.508509 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fd0aaae2760a1c934aed54b8e07528f0f78db1149c9e1f8674bec90b61a7078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://788d6a46d740de869fe7cd1d28479f8115a3515c7bd2482e06cbc4dba7b27fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68954082d4369b9b4d95ba1f15e19593dc330b698562be413055b2ba012b5f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eed8335754437c935f07223bf4c4f40f9ab0bbdc9a86b7610f7f6fd55140e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23b3fde15e637081cbf5fad2e55ccea1f0fe63d39ff5f82a60ce05ffeef9a837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f927067aabe4690e99fe50a069afdf68b22d950a8ae141235571ced468c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1
d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://508370f3dcd6abe4dbf8d3bcb4cf18954c12de8d7b30a7c3e5f1145bd9dbbe3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://add3ed68e0bf59bc569248a3660cfc9a20995641ec65b0bf7133e47c366aad73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:47Z is after 2025-08-24T17:21:41Z"
Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.523764 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:47Z is after 2025-08-24T17:21:41Z"
Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.534090 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.534133 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.534144 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.534162 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.534176 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:47Z","lastTransitionTime":"2025-10-11T04:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.539037 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:47Z is after 2025-08-24T17:21:41Z"
Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.551095 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52a0f9e11cf4e8c69d5b356b702ce9df8cbc935daadadf531004a23e5f6dad65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:47Z is after 2025-08-24T17:21:41Z"
Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.554556 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.554593 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 11 04:51:47 crc kubenswrapper[4651]: E1011 04:51:47.554712 4651 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 11 04:51:47 crc kubenswrapper[4651]: E1011 04:51:47.554729 4651 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 11 04:51:47 crc kubenswrapper[4651]: E1011 04:51:47.554743 4651 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 11 04:51:47 crc kubenswrapper[4651]: E1011 04:51:47.554787 4651 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 11 04:51:47 crc kubenswrapper[4651]: E1011 04:51:47.554844 4651 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 11 04:51:47 crc kubenswrapper[4651]: E1011 04:51:47.554858 4651 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 11 04:51:47 crc kubenswrapper[4651]: E1011 04:51:47.554797 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-11 04:51:55.554782961 +0000 UTC m=+36.451015767 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 11 04:51:47 crc kubenswrapper[4651]: E1011 04:51:47.554948 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-11 04:51:55.554917264 +0000 UTC m=+36.451150080 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.636277 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.636319 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.636330 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.636346 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.636371 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:47Z","lastTransitionTime":"2025-10-11T04:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.738894 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.738953 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.738971 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.738997 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.739018 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:47Z","lastTransitionTime":"2025-10-11T04:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.841888 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.841936 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.841953 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.842005 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.842020 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:47Z","lastTransitionTime":"2025-10-11T04:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.869122 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.869122 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 11 04:51:47 crc kubenswrapper[4651]: E1011 04:51:47.869263 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 11 04:51:47 crc kubenswrapper[4651]: E1011 04:51:47.869315 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.945014 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.945087 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.945104 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.945130 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:51:47 crc kubenswrapper[4651]: I1011 04:51:47.945150 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:47Z","lastTransitionTime":"2025-10-11T04:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.049253 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.049379 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.049404 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.049433 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.049454 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:48Z","lastTransitionTime":"2025-10-11T04:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.110492 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" event={"ID":"215170a8-84a9-4e15-9b0e-c1200c680f30","Type":"ContainerStarted","Data":"2a036609457cdb5a74a4ccf65ffa2e502f32994e6e3f1ef8a4fd4b89c5454773"}
Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.110532 4651 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.128051 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:48Z is after 2025-08-24T17:21:41Z"
Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.143743 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f12e36a570de649e1df8107ea828cd8e65c18e111de191d3796fc1f3134e43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:48Z is after 2025-08-24T17:21:41Z"
Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.153177 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.153233 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.153249 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.153274 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.153290 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:48Z","lastTransitionTime":"2025-10-11T04:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.158063 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:48Z is after 2025-08-24T17:21:41Z"
Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.173988 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:48Z is after 2025-08-24T17:21:41Z"
Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.193914 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"744aa461-2868-440d-a1b9-6d30c0c50b56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aabba06dd47ad528f830a47cc79ddabb3789d24ebed62a6e8f976df1b156ef1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb996f17546fc77ac1b8ed27428aaa6060822daa0156cf016dbb24ed278403a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ee2612930be6596c0186cc000d80039b52e225fd64102002ed9316a0605521\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6263dc47a8284e9849da1fb15c4788174252f3637665ac6e4b19298ca90c587\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:48Z is after 2025-08-24T17:21:41Z"
Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.207099 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7407b7da9820bbcdac4ce9c49c4520834a466b0db187c5f861972713ba1583f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:48Z is after 2025-08-24T17:21:41Z"
Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.223085 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:48Z is after 2025-08-24T17:21:41Z"
Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.238448 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:48Z is after 2025-08-24T17:21:41Z"
Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.249674 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52a0f9e11cf4e8c69d5b356b702ce9df8cbc935daadadf531004a23e5f6dad65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time
2025-10-11T04:51:48Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.256147 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.256205 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.256244 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.256264 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.256278 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:48Z","lastTransitionTime":"2025-10-11T04:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.268229 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fd0aaae2760a1c934aed54b8e07528f0f78db1149c9e1f8674bec90b61a7078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://788d6a46d740de869fe7cd1d28479f8115a3515c7bd2482e06cbc4dba7b27fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68954082d4369b9b4d95ba1f15e19593dc330b698562be413055b2ba012b5f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eed8335754437c935f07223bf4c4f40f9ab0bbdc9a86b7610f7f6fd55140e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23b3fde15e637081cbf5fad2e55ccea1f0fe63d39ff5f82a60ce05ffeef9a837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f927067aabe4690e99fe50a069afdf68b22d950a8ae141235571ced468c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://508370f3dcd6abe4dbf8d3bcb4cf18954c12de8d
7b30a7c3e5f1145bd9dbbe3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://add3ed68e0bf59bc569248a3660cfc9a20995641ec65b0bf7133e47c366aad73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:48Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.291974 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a036609457cdb5a74a4ccf65ffa2e502f32994e6e3f1ef8a4fd4b89c5454773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:48Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.304129 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d425a775a5e278ee77231764e58cac3441e04c383eee54f69ba84488ca640eae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:48Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.317391 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55e4c9c2d46f67bb0f8ccdb2de27dc82f0280476613b3b30d587ff6f5784e787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:48Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.330525 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e5b8526fed77f2e541d6793dc36a902daebce60d367f28f9efb45f7fabb44fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e278c9a5d14f026733737226a6ffc91202b760e48622d5d0031678981fa5f857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:48Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.358279 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.358312 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.358321 4651 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.358335 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.358344 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:48Z","lastTransitionTime":"2025-10-11T04:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.460149 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.460179 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.460186 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.460199 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.460207 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:48Z","lastTransitionTime":"2025-10-11T04:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.563125 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.563197 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.563220 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.563250 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.563274 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:48Z","lastTransitionTime":"2025-10-11T04:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.665879 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.665929 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.665947 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.665967 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.665981 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:48Z","lastTransitionTime":"2025-10-11T04:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.767930 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.767969 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.767980 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.767995 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.768003 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:48Z","lastTransitionTime":"2025-10-11T04:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.868938 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:51:48 crc kubenswrapper[4651]: E1011 04:51:48.869432 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.870019 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.870050 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.870059 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.870074 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.870083 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:48Z","lastTransitionTime":"2025-10-11T04:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.972565 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.972607 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.972619 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.972636 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:48 crc kubenswrapper[4651]: I1011 04:51:48.972655 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:48Z","lastTransitionTime":"2025-10-11T04:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.075004 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.075046 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.075061 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.075081 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.075095 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:49Z","lastTransitionTime":"2025-10-11T04:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.114467 4651 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.177318 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.177373 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.177387 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.177408 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.177423 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:49Z","lastTransitionTime":"2025-10-11T04:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.280310 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.280376 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.280395 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.280421 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.280439 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:49Z","lastTransitionTime":"2025-10-11T04:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.383090 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.383133 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.383146 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.383161 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.383173 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:49Z","lastTransitionTime":"2025-10-11T04:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.487355 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.487654 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.487664 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.487680 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.487691 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:49Z","lastTransitionTime":"2025-10-11T04:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.590557 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.590623 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.590640 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.590663 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.590680 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:49Z","lastTransitionTime":"2025-10-11T04:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.693802 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.693894 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.693914 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.693939 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.693957 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:49Z","lastTransitionTime":"2025-10-11T04:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.797077 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.797140 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.797158 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.797183 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.797200 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:49Z","lastTransitionTime":"2025-10-11T04:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.869535 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.869555 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:51:49 crc kubenswrapper[4651]: E1011 04:51:49.869781 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:51:49 crc kubenswrapper[4651]: E1011 04:51:49.869949 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.886711 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7407b7da9820bbcdac4ce9c49c4520834a466b0db187c5f861972713ba1583f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\
\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:49Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.899123 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.899171 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.899181 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.899201 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.899213 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:49Z","lastTransitionTime":"2025-10-11T04:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.909785 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:49Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.932952 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:49Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.949384 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52a0f9e11cf4e8c69d5b356b702ce9df8cbc935daadadf531004a23e5f6dad65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-11T04:51:49Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:49 crc kubenswrapper[4651]: I1011 04:51:49.987861 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fd0aaae2760a1c934aed54b8e07528f0f78db1149c9e1f8674bec90b61a7078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://788d6a46d740de869fe7cd1d28479f8115a3515c7bd2482e06cbc4dba7b27fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\
"containerID\\\":\\\"cri-o://68954082d4369b9b4d95ba1f15e19593dc330b698562be413055b2ba012b5f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eed8335754437c935f07223bf4c4f40f9ab0bbdc9a86b7610f7f6fd55140e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23b3fde15e637081cbf5fad2e55ccea1f0fe63d39ff5f82a60ce05ffeef9a837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f927067aabe4690e99fe50a069afdf68b22d950a8ae141235571ced468c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://508370f3dcd6abe4dbf8d3bcb4cf18954c12de8d7b30a7c3e5f1145bd9dbbe3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ov
nkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://add3ed68e0bf59bc569248a3660cfc9a20995641ec65b0bf7133e47c366aad73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:49Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.001611 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.001669 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 
04:51:50.001683 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.001704 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.001720 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:50Z","lastTransitionTime":"2025-10-11T04:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.011314 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a036609457cdb5a74a4ccf65ffa2e502f32994e6e3f1ef8a4fd4b89c5454773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\
\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readO
nly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:50Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.030770 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d425a775a5e278ee77231764e58cac3441e04c383eee54f69ba84488ca640eae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:50Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.046058 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55e4c9c2d46f67bb0f8ccdb2de27dc82f0280476613b3b30d587ff6f5784e787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:50Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.061723 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e5b8526fed77f2e541d6793dc36a902daebce60d367f28f9efb45f7fabb44fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e278c9a5d14f026733737226a6ffc91202b760e48622d5d0031678981fa5f857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:50Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.080091 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:50Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.098950 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"744aa461-2868-440d-a1b9-6d30c0c50b56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aabba06dd47ad528f830a47cc79ddabb3789d24ebed62a6e8f976df1b156ef1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb996f17546fc77ac1b8ed27428aaa6060822daa0156cf016dbb24ed278403a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ee2612930be6596c0186cc000d80039b52e225fd64102002ed9316a0605521\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6263dc47a8284e9849da1fb15c4788174252f3637665ac6e4b19298ca90c587\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:50Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.104344 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.104389 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.104400 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.104423 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.104435 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:50Z","lastTransitionTime":"2025-10-11T04:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.112787 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:50Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.118229 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6zt9s_28e01c08-a461-4f44-a49c-4bf92fd3a2ce/ovnkube-controller/0.log" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.121942 4651 generic.go:334] "Generic (PLEG): container finished" podID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerID="508370f3dcd6abe4dbf8d3bcb4cf18954c12de8d7b30a7c3e5f1145bd9dbbe3d" exitCode=1 Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.122014 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" event={"ID":"28e01c08-a461-4f44-a49c-4bf92fd3a2ce","Type":"ContainerDied","Data":"508370f3dcd6abe4dbf8d3bcb4cf18954c12de8d7b30a7c3e5f1145bd9dbbe3d"} Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.123006 4651 
scope.go:117] "RemoveContainer" containerID="508370f3dcd6abe4dbf8d3bcb4cf18954c12de8d7b30a7c3e5f1145bd9dbbe3d" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.126095 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f12e36a570de649e1df8107ea828cd8e65c18e111de191d3796fc1f3134e43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:50Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.140502 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:50Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.154232 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:50Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.164295 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f12e36a570de649e1df8107ea828cd8e65c18e111de191d3796fc1f3134e43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:50Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.178033 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":
\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:50Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.190347 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:50Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.200785 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"744aa461-2868-440d-a1b9-6d30c0c50b56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aabba06dd47ad528f830a47cc79ddabb3789d24ebed62a6e8f976df1b156ef1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb996f17546fc77ac1b8ed27428aaa6060822daa0156cf016dbb24ed278403a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ee2612930be6596c0186cc000d80039b52e225fd64102002ed9316a0605521\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6263dc47a8284e9849da1fb15c4788174252f3637665ac6e4b19298ca90c587\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:50Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.206430 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.206570 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.206689 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.206837 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.206984 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:50Z","lastTransitionTime":"2025-10-11T04:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.213273 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:50Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.223795 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7407b7da9820bbcdac4ce9c49c4520834a466b0db187c5f861972713ba1583f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:50Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.241243 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fd0aaae2760a1c934aed54b8e07528f0f78db1149c9e1f8674bec90b61a7078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://788d6a46d740de869fe7cd1d28479f8115a3515c7bd2482e06cbc4dba7b27fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68954082d4369b9b4d95ba1f15e19593dc330b698562be413055b2ba012b5f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eed8335754437c935f07223bf4c4f40f9ab0bbdc9a86b7610f7f6fd55140e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23b3fde15e637081cbf5fad2e55ccea1f0fe63d39ff5f82a60ce05ffeef9a837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f927067aabe4690e99fe50a069afdf68b22d950a8ae141235571ced468c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://508370f3dcd6abe4dbf8d3bcb4cf18954c12de8d7b30a7c3e5f1145bd9dbbe3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://508370f3dcd6abe4dbf8d3bcb4cf18954c12de8d7b30a7c3e5f1145bd9dbbe3d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T04:51:49Z\\\",\\\"message\\\":\\\"aselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1011 04:51:49.433512 5944 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1011 04:51:49.433523 5944 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1011 04:51:49.433531 5944 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1011 04:51:49.433894 5944 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1011 04:51:49.433960 5944 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 04:51:49.434048 5944 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1011 04:51:49.434066 5944 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1011 04:51:49.434091 5944 factory.go:656] Stopping watch factory\\\\nI1011 04:51:49.434091 5944 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 04:51:49.434124 5944 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 04:51:49.434236 5944 handler.go:208] Removed *v1.Node event handler 2\\\\nI1011 04:51:49.434250 5944 handler.go:208] Removed *v1.Node 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://add3ed68e0bf59bc569248a3660cfc9a20995641ec65b0bf7133e47c366aad73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:50Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.254642 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a036609457cdb5a74a4ccf65ffa2e502f32994e6e3f1ef8a4fd4b89c5454773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containe
rID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:43Z\\\"}},\\\"volumeM
ounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:50Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.266958 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:50Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.277013 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52a0f9e11cf4e8c69d5b356b702ce9df8cbc935daadadf531004a23e5f6dad65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-11T04:51:50Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.288151 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e5b8526fed77f2e541d6793dc36a902daebce60d367f28f9efb45f7fabb44fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e278c9a5d14f026733737226a6ffc91202b760e48622d5d0031678981fa5f857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:50Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.302453 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d425a775a5e278ee77231764e58cac3441e04c383eee54f69ba84488ca640eae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:50Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.309404 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.309431 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.309440 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.309454 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.309462 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:50Z","lastTransitionTime":"2025-10-11T04:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.314397 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55e4c9c2d46f67bb0f8ccdb2de27dc82f0280476613b3b30d587ff6f5784e787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:50Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.411329 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.411404 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.411416 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.411433 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.411444 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:50Z","lastTransitionTime":"2025-10-11T04:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.514185 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.514257 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.514280 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.514313 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.514332 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:50Z","lastTransitionTime":"2025-10-11T04:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.617437 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.617513 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.617534 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.617611 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.617632 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:50Z","lastTransitionTime":"2025-10-11T04:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.720468 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.720512 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.720527 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.720546 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.720560 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:50Z","lastTransitionTime":"2025-10-11T04:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.823909 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.823953 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.823964 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.823980 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.823992 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:50Z","lastTransitionTime":"2025-10-11T04:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.869286 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:51:50 crc kubenswrapper[4651]: E1011 04:51:50.869476 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.926314 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.926380 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.926407 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.926437 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:50 crc kubenswrapper[4651]: I1011 04:51:50.926458 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:50Z","lastTransitionTime":"2025-10-11T04:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.028584 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.028625 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.028636 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.028651 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.028662 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:51Z","lastTransitionTime":"2025-10-11T04:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.126716 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6zt9s_28e01c08-a461-4f44-a49c-4bf92fd3a2ce/ovnkube-controller/1.log" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.127880 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6zt9s_28e01c08-a461-4f44-a49c-4bf92fd3a2ce/ovnkube-controller/0.log" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.130883 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.130962 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.130989 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.131023 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.131046 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:51Z","lastTransitionTime":"2025-10-11T04:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.132386 4651 generic.go:334] "Generic (PLEG): container finished" podID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerID="4c5fa48d147adc4e46f3bbc60135517a4177c9db0a70c637de5eb499189d3912" exitCode=1 Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.132448 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" event={"ID":"28e01c08-a461-4f44-a49c-4bf92fd3a2ce","Type":"ContainerDied","Data":"4c5fa48d147adc4e46f3bbc60135517a4177c9db0a70c637de5eb499189d3912"} Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.132514 4651 scope.go:117] "RemoveContainer" containerID="508370f3dcd6abe4dbf8d3bcb4cf18954c12de8d7b30a7c3e5f1145bd9dbbe3d" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.133759 4651 scope.go:117] "RemoveContainer" containerID="4c5fa48d147adc4e46f3bbc60135517a4177c9db0a70c637de5eb499189d3912" Oct 11 04:51:51 crc kubenswrapper[4651]: E1011 04:51:51.134091 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-6zt9s_openshift-ovn-kubernetes(28e01c08-a461-4f44-a49c-4bf92fd3a2ce)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.149271 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f12e36a570de649e1df8107ea828cd8e65c18e111de191d3796fc1f3134e43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disab
led\\\"}]},{\\\"containerID\\\":\\\"cri-o://583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:51Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.169453 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":
\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:51Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.187218 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:51Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.207509 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"744aa461-2868-440d-a1b9-6d30c0c50b56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aabba06dd47ad528f830a47cc79ddabb3789d24ebed62a6e8f976df1b156ef1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb996f17546fc77ac1b8ed27428aaa6060822daa0156cf016dbb24ed278403a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ee2612930be6596c0186cc000d80039b52e225fd64102002ed9316a0605521\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6263dc47a8284e9849da1fb15c4788174252f3637665ac6e4b19298ca90c587\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:51Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.227332 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:51Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.233054 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.233093 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.233103 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.233118 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.233127 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:51Z","lastTransitionTime":"2025-10-11T04:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.243495 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7407b7da9820bbcdac4ce9c49c4520834a466b0db187c5f861972713ba1583f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:51Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.262102 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:51Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.274113 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:51Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.288269 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52a0f9e11cf4e8c69d5b356b702ce9df8cbc935daadadf531004a23e5f6dad65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-11T04:51:51Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.311345 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fd0aaae2760a1c934aed54b8e07528f0f78db1149c9e1f8674bec90b61a7078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://788d6a46d740de869fe7cd1d28479f8115a3515c7bd2482e06cbc4dba7b27fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\
"containerID\\\":\\\"cri-o://68954082d4369b9b4d95ba1f15e19593dc330b698562be413055b2ba012b5f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eed8335754437c935f07223bf4c4f40f9ab0bbdc9a86b7610f7f6fd55140e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23b3fde15e637081cbf5fad2e55ccea1f0fe63d39ff5f82a60ce05ffeef9a837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f927067aabe4690e99fe50a069afdf68b22d950a8ae141235571ced468c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5fa48d147adc4e46f3bbc60135517a4177c9db0a70c637de5eb499189d3912\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://508370f3dcd6abe4dbf8d3bcb4cf18954c12de8d7b30a7c3e5f1145bd9dbbe3d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T04:51:49Z\\\",\\\"message\\\":\\\"aselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1011 04:51:49.433512 5944 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1011 04:51:49.433523 5944 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1011 04:51:49.433531 5944 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1011 04:51:49.433894 5944 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1011 04:51:49.433960 5944 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 04:51:49.434048 5944 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1011 04:51:49.434066 5944 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1011 04:51:49.434091 5944 factory.go:656] Stopping watch factory\\\\nI1011 04:51:49.434091 5944 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 04:51:49.434124 5944 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 04:51:49.434236 5944 handler.go:208] Removed *v1.Node event handler 2\\\\nI1011 04:51:49.434250 5944 handler.go:208] Removed *v1.Node 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:46Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c5fa48d147adc4e46f3bbc60135517a4177c9db0a70c637de5eb499189d3912\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T04:51:51Z\\\",\\\"message\\\":\\\"dk6g\\\\nI1011 04:51:51.073982 6068 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nF1011 04:51:51.073979 6068 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:51Z is after 2025-08-24T17:21:41Z]\\\\nI1011 04:51:51.073992 6068 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.59 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {dce28c51-c9f1-478b-97c8-7e209d6e7cbe}] Until: Durable:\\\\u003cnil\\\\u003e 
Comment:\\\\u003cnil\\\\u003e\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://add3ed68e0bf59bc569248a3660cfc9a20995641ec65b0bf7133e47c366aad73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev
@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:51Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.329761 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a036609457cdb5a74a4ccf65ffa2e502f32994e6e3f1ef8a4fd4b89c5454773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainer
Statuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T
04:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabout
s-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:51Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.335906 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.335951 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.335966 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.335984 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.335996 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:51Z","lastTransitionTime":"2025-10-11T04:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.345887 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d425a775a5e278ee77231764e58cac3441e04c383eee54f69ba84488ca640eae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:51Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.361968 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55e4c9c2d46f67bb0f8ccdb2de27dc82f0280476613b3b30d587ff6f5784e787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:51Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.376439 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e5b8526fed77f2e541d6793dc36a902daebce60d367f28f9efb45f7fabb44fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e278c9a5d14f026733737226a6ffc91202b760e48622d5d0031678981fa5f857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:51Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.439346 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.439415 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.439428 4651 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.439446 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.439459 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:51Z","lastTransitionTime":"2025-10-11T04:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.542007 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.542080 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.542099 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.542125 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.542142 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:51Z","lastTransitionTime":"2025-10-11T04:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.645814 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.645911 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.645953 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.645976 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.645997 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:51Z","lastTransitionTime":"2025-10-11T04:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.747969 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.748014 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.748026 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.748043 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.748056 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:51Z","lastTransitionTime":"2025-10-11T04:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.851691 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.851742 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.851760 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.851786 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.851803 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:51Z","lastTransitionTime":"2025-10-11T04:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.869104 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.869157 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:51:51 crc kubenswrapper[4651]: E1011 04:51:51.869251 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:51:51 crc kubenswrapper[4651]: E1011 04:51:51.869431 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.954502 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.954562 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.954578 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.954603 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:51 crc kubenswrapper[4651]: I1011 04:51:51.954620 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:51Z","lastTransitionTime":"2025-10-11T04:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.058052 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.058111 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.058128 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.058154 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.058172 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:52Z","lastTransitionTime":"2025-10-11T04:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.139521 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6zt9s_28e01c08-a461-4f44-a49c-4bf92fd3a2ce/ovnkube-controller/1.log" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.161110 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.161255 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.161316 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.161418 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.161478 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:52Z","lastTransitionTime":"2025-10-11T04:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.264335 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.264472 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.264532 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.264558 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.264575 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:52Z","lastTransitionTime":"2025-10-11T04:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.367555 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.367618 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.367627 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.367641 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.367674 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:52Z","lastTransitionTime":"2025-10-11T04:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.470675 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.470746 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.470756 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.470771 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.470779 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:52Z","lastTransitionTime":"2025-10-11T04:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.572749 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.572793 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.572805 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.572875 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.572891 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:52Z","lastTransitionTime":"2025-10-11T04:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.676371 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.676429 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.676441 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.676463 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.676475 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:52Z","lastTransitionTime":"2025-10-11T04:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.780008 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.780070 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.780088 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.780112 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.780129 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:52Z","lastTransitionTime":"2025-10-11T04:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.799346 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc"] Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.800120 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.804192 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.804304 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.807624 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pbjvz\" (UniqueName: \"kubernetes.io/projected/2f5830d7-3c6f-48f0-b103-a228e7f8e448-kube-api-access-pbjvz\") pod \"ovnkube-control-plane-749d76644c-qsgwc\" (UID: \"2f5830d7-3c6f-48f0-b103-a228e7f8e448\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.807709 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2f5830d7-3c6f-48f0-b103-a228e7f8e448-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-qsgwc\" (UID: \"2f5830d7-3c6f-48f0-b103-a228e7f8e448\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.807803 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2f5830d7-3c6f-48f0-b103-a228e7f8e448-env-overrides\") pod \"ovnkube-control-plane-749d76644c-qsgwc\" (UID: \"2f5830d7-3c6f-48f0-b103-a228e7f8e448\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.807898 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2f5830d7-3c6f-48f0-b103-a228e7f8e448-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-qsgwc\" (UID: \"2f5830d7-3c6f-48f0-b103-a228e7f8e448\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.826236 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:52Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.844052 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f12e36a570de649e1df8107ea828cd8e65c18e111de191d3796fc1f3134e43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:52Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.863502 4651 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:52Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.869482 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:51:52 crc kubenswrapper[4651]: E1011 04:51:52.869637 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.883101 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.883214 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.883236 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.883258 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.883275 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:52Z","lastTransitionTime":"2025-10-11T04:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.884962 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:52Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.901880 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"744aa461-2868-440d-a1b9-6d30c0c50b56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aabba06dd47ad528f830a47cc79ddabb3789d24ebed62a6e8f976df1b156ef1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb996f17546fc77ac1b8ed27428aaa6060822daa0156cf016dbb24ed278403a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ee2612930be6596c0186cc000d80039b52e225fd64102002ed9316a0605521\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6263dc47a8284e9849da1fb15c4788174252f3637665ac6e4b19298ca90c587\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:52Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.909125 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2f5830d7-3c6f-48f0-b103-a228e7f8e448-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-qsgwc\" (UID: \"2f5830d7-3c6f-48f0-b103-a228e7f8e448\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.909205 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pbjvz\" (UniqueName: \"kubernetes.io/projected/2f5830d7-3c6f-48f0-b103-a228e7f8e448-kube-api-access-pbjvz\") pod \"ovnkube-control-plane-749d76644c-qsgwc\" (UID: \"2f5830d7-3c6f-48f0-b103-a228e7f8e448\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.909301 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2f5830d7-3c6f-48f0-b103-a228e7f8e448-env-overrides\") pod \"ovnkube-control-plane-749d76644c-qsgwc\" (UID: \"2f5830d7-3c6f-48f0-b103-a228e7f8e448\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.909358 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2f5830d7-3c6f-48f0-b103-a228e7f8e448-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-qsgwc\" (UID: \"2f5830d7-3c6f-48f0-b103-a228e7f8e448\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.910949 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2f5830d7-3c6f-48f0-b103-a228e7f8e448-env-overrides\") pod \"ovnkube-control-plane-749d76644c-qsgwc\" (UID: \"2f5830d7-3c6f-48f0-b103-a228e7f8e448\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 
04:51:52.911128 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2f5830d7-3c6f-48f0-b103-a228e7f8e448-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-qsgwc\" (UID: \"2f5830d7-3c6f-48f0-b103-a228e7f8e448\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.916007 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7407b7da9820bbcdac4ce9c49c4520834a466b0db187c5f861972713ba1583f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:52Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.921116 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2f5830d7-3c6f-48f0-b103-a228e7f8e448-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-qsgwc\" (UID: \"2f5830d7-3c6f-48f0-b103-a228e7f8e448\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" Oct 11 
04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.936707 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:52Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.941874 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pbjvz\" (UniqueName: \"kubernetes.io/projected/2f5830d7-3c6f-48f0-b103-a228e7f8e448-kube-api-access-pbjvz\") pod \"ovnkube-control-plane-749d76644c-qsgwc\" (UID: \"2f5830d7-3c6f-48f0-b103-a228e7f8e448\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.961614 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a036609457cdb5a74a4ccf65ffa2e502f32994e6e3f1ef8a4fd4b89c5454773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:52Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.977589 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f5830d7-3c6f-48f0-b103-a228e7f8e448\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qsgwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:52Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.986484 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:52 crc 
kubenswrapper[4651]: I1011 04:51:52.986537 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.986558 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.986584 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.986631 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:52Z","lastTransitionTime":"2025-10-11T04:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:52 crc kubenswrapper[4651]: I1011 04:51:52.998918 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:52Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.014206 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52a0f9e11cf4e8c69d5b356b702ce9df8cbc935daadadf531004a23e5f6dad65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-11T04:51:53Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.041718 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fd0aaae2760a1c934aed54b8e07528f0f78db1149c9e1f8674bec90b61a7078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://788d6a46d740de869fe7cd1d28479f8115a3515c7bd2482e06cbc4dba7b27fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\
"containerID\\\":\\\"cri-o://68954082d4369b9b4d95ba1f15e19593dc330b698562be413055b2ba012b5f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eed8335754437c935f07223bf4c4f40f9ab0bbdc9a86b7610f7f6fd55140e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23b3fde15e637081cbf5fad2e55ccea1f0fe63d39ff5f82a60ce05ffeef9a837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f927067aabe4690e99fe50a069afdf68b22d950a8ae141235571ced468c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5fa48d147adc4e46f3bbc60135517a4177c9db0a70c637de5eb499189d3912\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://508370f3dcd6abe4dbf8d3bcb4cf18954c12de8d7b30a7c3e5f1145bd9dbbe3d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T04:51:49Z\\\",\\\"message\\\":\\\"aselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1011 04:51:49.433512 5944 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1011 04:51:49.433523 5944 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1011 04:51:49.433531 5944 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1011 04:51:49.433894 5944 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1011 04:51:49.433960 5944 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 04:51:49.434048 5944 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1011 04:51:49.434066 5944 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1011 04:51:49.434091 5944 factory.go:656] Stopping watch factory\\\\nI1011 04:51:49.434091 5944 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 04:51:49.434124 5944 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 04:51:49.434236 5944 handler.go:208] Removed *v1.Node event handler 2\\\\nI1011 04:51:49.434250 5944 handler.go:208] Removed *v1.Node 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:46Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c5fa48d147adc4e46f3bbc60135517a4177c9db0a70c637de5eb499189d3912\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T04:51:51Z\\\",\\\"message\\\":\\\"dk6g\\\\nI1011 04:51:51.073982 6068 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nF1011 04:51:51.073979 6068 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:51Z is after 2025-08-24T17:21:41Z]\\\\nI1011 04:51:51.073992 6068 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.59 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {dce28c51-c9f1-478b-97c8-7e209d6e7cbe}] Until: Durable:\\\\u003cnil\\\\u003e 
Comment:\\\\u003cnil\\\\u003e\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://add3ed68e0bf59bc569248a3660cfc9a20995641ec65b0bf7133e47c366aad73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev
@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:53Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.060866 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d425a775a5e278ee77231764e58cac3441e04c383eee54f69ba84488ca640eae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:53Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.077218 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55e4c9c2d46f67bb0f8ccdb2de27dc82f0280476613b3b30d587ff6f5784e787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:53Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.088762 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.088847 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.088859 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.088875 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.088889 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:53Z","lastTransitionTime":"2025-10-11T04:51:53Z","reason":"KubeletNotReady","message":"container runtime 
network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.096590 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e5b8526fed77f2e541d6793dc36a902daebce60d367f28f9efb45f7fabb44fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e278c9a5d14f026733737226a6ffc91202b760e48622d5d0031678981fa5f857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:53Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.121385 
4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" Oct 11 04:51:53 crc kubenswrapper[4651]: W1011 04:51:53.143948 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f5830d7_3c6f_48f0_b103_a228e7f8e448.slice/crio-8ab3f7901176e8d14fde0ed67fce0b1d2eda0cdd337c217401ff08da4a38c219 WatchSource:0}: Error finding container 8ab3f7901176e8d14fde0ed67fce0b1d2eda0cdd337c217401ff08da4a38c219: Status 404 returned error can't find the container with id 8ab3f7901176e8d14fde0ed67fce0b1d2eda0cdd337c217401ff08da4a38c219 Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.191631 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.191680 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.191694 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.191714 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.191728 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:53Z","lastTransitionTime":"2025-10-11T04:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.293284 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.293315 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.293322 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.293335 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.293345 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:53Z","lastTransitionTime":"2025-10-11T04:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.395514 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.395569 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.395586 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.395609 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.395627 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:53Z","lastTransitionTime":"2025-10-11T04:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.499099 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.499143 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.499157 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.499179 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.499192 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:53Z","lastTransitionTime":"2025-10-11T04:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.571465 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-tgvv8"] Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.572081 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:51:53 crc kubenswrapper[4651]: E1011 04:51:53.572161 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.589078 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d425a775a5e278ee77231764e58cac3441e04c383eee54f69ba84488ca640eae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:53Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.602415 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.602455 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.602469 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.602487 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.602500 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:53Z","lastTransitionTime":"2025-10-11T04:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.614718 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55e4c9c2d46f67bb0f8ccdb2de27dc82f0280476613b3b30d587ff6f5784e787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:53Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.615311 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a551fed8-58fb-48ae-88af-8dc0cb48fc30-metrics-certs\") pod \"network-metrics-daemon-tgvv8\" (UID: \"a551fed8-58fb-48ae-88af-8dc0cb48fc30\") " pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.615358 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nphhq\" (UniqueName: \"kubernetes.io/projected/a551fed8-58fb-48ae-88af-8dc0cb48fc30-kube-api-access-nphhq\") pod \"network-metrics-daemon-tgvv8\" (UID: \"a551fed8-58fb-48ae-88af-8dc0cb48fc30\") " pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.632251 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e5b8526fed77f2e541d6793dc36a902daebce60d367f28f9efb45f7fabb44fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e278c9a5d14f026733737226a6ffc91202b760e48622d5d0031678981fa5f857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:53Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.649168 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:53Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.661578 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:53Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.673809 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"744aa461-2868-440d-a1b9-6d30c0c50b56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aabba06dd47ad528f830a47cc79ddabb3789d24ebed62a6e8f976df1b156ef1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb996f17546fc77ac1b8ed27428aaa6060822daa0156cf016dbb24ed278403a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ee2612930be6596c0186cc000d80039b52e225fd64102002ed9316a0605521\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6263dc47a8284e9849da1fb15c4788174252f3637665ac6e4b19298ca90c587\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:53Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.687223 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:53Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.699568 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f12e36a570de649e1df8107ea828cd8e65c18e111de191d3796fc1f3134e43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:53Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.704072 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.704103 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.704111 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.704126 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.704136 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:53Z","lastTransitionTime":"2025-10-11T04:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.710322 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7407b7da9820bbcdac4ce9c49c4520834a466b0db187c5f861972713ba1583f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:53Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.715897 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a551fed8-58fb-48ae-88af-8dc0cb48fc30-metrics-certs\") pod \"network-metrics-daemon-tgvv8\" (UID: \"a551fed8-58fb-48ae-88af-8dc0cb48fc30\") " pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.715973 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nphhq\" (UniqueName: \"kubernetes.io/projected/a551fed8-58fb-48ae-88af-8dc0cb48fc30-kube-api-access-nphhq\") pod \"network-metrics-daemon-tgvv8\" (UID: \"a551fed8-58fb-48ae-88af-8dc0cb48fc30\") " 
pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:51:53 crc kubenswrapper[4651]: E1011 04:51:53.716120 4651 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 11 04:51:53 crc kubenswrapper[4651]: E1011 04:51:53.716215 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a551fed8-58fb-48ae-88af-8dc0cb48fc30-metrics-certs podName:a551fed8-58fb-48ae-88af-8dc0cb48fc30 nodeName:}" failed. No retries permitted until 2025-10-11 04:51:54.216192063 +0000 UTC m=+35.112424899 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a551fed8-58fb-48ae-88af-8dc0cb48fc30-metrics-certs") pod "network-metrics-daemon-tgvv8" (UID: "a551fed8-58fb-48ae-88af-8dc0cb48fc30") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.722474 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:53Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.735839 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-tgvv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a551fed8-58fb-48ae-88af-8dc0cb48fc30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nphhq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nphhq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:53Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-tgvv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:53Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.737344 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nphhq\" (UniqueName: \"kubernetes.io/projected/a551fed8-58fb-48ae-88af-8dc0cb48fc30-kube-api-access-nphhq\") pod \"network-metrics-daemon-tgvv8\" (UID: \"a551fed8-58fb-48ae-88af-8dc0cb48fc30\") " pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.748866 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:53Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.759563 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52a0f9e11cf4e8c69d5b356b702ce9df8cbc935daadadf531004a23e5f6dad65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:53Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.780059 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fd0aaae2760a1c934aed54b8e07528f0f78db1149c9e1f8674bec90b61a7078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://788d6a46d740de869fe7cd1d28479f8115a3515c7bd2482e06cbc4dba7b27fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68954082d4369b9b4d95ba1f15e19593dc330b698562be413055b2ba012b5f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eed8335754437c935f07223bf4c4f40f9ab0bbdc9a86b7610f7f6fd55140e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23b3fde15e637081cbf5fad2e55ccea1f0fe63d39ff5f82a60ce05ffeef9a837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f927067aabe4690e99fe50a069afdf68b22d950a8ae141235571ced468c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5fa48d147adc4e46f3bbc60135517a4177c9db
0a70c637de5eb499189d3912\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://508370f3dcd6abe4dbf8d3bcb4cf18954c12de8d7b30a7c3e5f1145bd9dbbe3d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T04:51:49Z\\\",\\\"message\\\":\\\"aselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1011 04:51:49.433512 5944 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1011 04:51:49.433523 5944 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1011 04:51:49.433531 5944 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1011 04:51:49.433894 5944 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1011 04:51:49.433960 5944 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 04:51:49.434048 5944 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1011 04:51:49.434066 5944 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1011 04:51:49.434091 5944 factory.go:656] Stopping watch factory\\\\nI1011 04:51:49.434091 5944 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 04:51:49.434124 5944 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 04:51:49.434236 5944 handler.go:208] Removed *v1.Node event handler 2\\\\nI1011 04:51:49.434250 5944 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:46Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c5fa48d147adc4e46f3bbc60135517a4177c9db0a70c637de5eb499189d3912\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T04:51:51Z\\\",\\\"message\\\":\\\"dk6g\\\\nI1011 04:51:51.073982 6068 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nF1011 04:51:51.073979 6068 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:51Z is after 2025-08-24T17:21:41Z]\\\\nI1011 04:51:51.073992 6068 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.59 
options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {dce28c51-c9f1-478b-97c8-7e209d6e7cbe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://add3ed68e0bf59bc569248a3660cfc9a20995641ec65b0bf7133e47c366aad73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"19
2.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:53Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.797025 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a036609457cdb5a74a4ccf65ffa2e502f32994e6e3f1ef8a4fd4b89c5454773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:53Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.807099 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.807142 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:53 crc 
kubenswrapper[4651]: I1011 04:51:53.807150 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.807164 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.807173 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:53Z","lastTransitionTime":"2025-10-11T04:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.812286 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f5830d7-3c6f-48f0-b103-a228e7f8e448\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qsgwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:53Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.868897 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.868920 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:51:53 crc kubenswrapper[4651]: E1011 04:51:53.869035 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:51:53 crc kubenswrapper[4651]: E1011 04:51:53.869239 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.909446 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.909517 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.909535 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.909561 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:53 crc kubenswrapper[4651]: I1011 04:51:53.909578 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:53Z","lastTransitionTime":"2025-10-11T04:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.015556 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.015586 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.015596 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.015607 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.015615 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:54Z","lastTransitionTime":"2025-10-11T04:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.118360 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.118408 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.118425 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.118447 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.118465 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:54Z","lastTransitionTime":"2025-10-11T04:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.155060 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" event={"ID":"2f5830d7-3c6f-48f0-b103-a228e7f8e448","Type":"ContainerStarted","Data":"e30f3d9db8e89696a8a98ab24ca995088afa72c957e19ca59ace228a23268d9c"} Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.155162 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" event={"ID":"2f5830d7-3c6f-48f0-b103-a228e7f8e448","Type":"ContainerStarted","Data":"8c307ee086caacd0a5299325126ff0b4b140aa1e18153b8f66696534543bb069"} Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.155195 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" event={"ID":"2f5830d7-3c6f-48f0-b103-a228e7f8e448","Type":"ContainerStarted","Data":"8ab3f7901176e8d14fde0ed67fce0b1d2eda0cdd337c217401ff08da4a38c219"} Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.181143 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d425a775a5e278ee77231764e58cac3441e04c383eee54f69ba84488ca640eae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:54Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.197170 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55e4c9c2d46f67bb0f8ccdb2de27dc82f0280476613b3b30d587ff6f5784e787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:54Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.215102 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e5b8526fed77f2e541d6793dc36a902daebce60d367f28f9efb45f7fabb44fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e278c9a5d14f026733737226a6ffc91202b760e48622d5d0031678981fa5f857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:54Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.219387 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a551fed8-58fb-48ae-88af-8dc0cb48fc30-metrics-certs\") pod \"network-metrics-daemon-tgvv8\" (UID: \"a551fed8-58fb-48ae-88af-8dc0cb48fc30\") " pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:51:54 crc kubenswrapper[4651]: E1011 04:51:54.219575 4651 secret.go:188] 
Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 11 04:51:54 crc kubenswrapper[4651]: E1011 04:51:54.219664 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a551fed8-58fb-48ae-88af-8dc0cb48fc30-metrics-certs podName:a551fed8-58fb-48ae-88af-8dc0cb48fc30 nodeName:}" failed. No retries permitted until 2025-10-11 04:51:55.219635547 +0000 UTC m=+36.115868373 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a551fed8-58fb-48ae-88af-8dc0cb48fc30-metrics-certs") pod "network-metrics-daemon-tgvv8" (UID: "a551fed8-58fb-48ae-88af-8dc0cb48fc30") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.222092 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.222265 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.222302 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.222349 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.222375 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:54Z","lastTransitionTime":"2025-10-11T04:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.235788 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f12e36a570de649e1df8107ea828cd8e65c18e111de191d3796fc1f3134e43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:54Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.254330 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:54Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.274106 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@s
ha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:54Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.294955 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"744aa461-2868-440d-a1b9-6d30c0c50b56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aabba06dd47ad528f830a47cc79ddabb3789d24ebed62a6e8f976df1b156ef1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb996f17546fc77ac1b8ed27428aaa6060822daa0156cf016dbb24ed278403a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ee2612930be6596c0186cc000d80039b52e225fd64102002ed9316a0605521\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6263dc47a8284e9849da1fb15c4788174252f3637665ac6e4b19298ca90c587\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:54Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.312153 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:54Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.326532 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.326587 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.326610 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.326645 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.326668 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:54Z","lastTransitionTime":"2025-10-11T04:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.328442 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7407b7da9820bbcdac4ce9c49c4520834a466b0db187c5f861972713ba1583f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:54Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.344296 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:54Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.360401 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-tgvv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a551fed8-58fb-48ae-88af-8dc0cb48fc30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nphhq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nphhq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:53Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-tgvv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:54Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.376870 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:54Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.390813 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52a0f9e11cf4e8c69d5b356b702ce9df8cbc935daadadf531004a23e5f6dad65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-11T04:51:54Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.426075 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fd0aaae2760a1c934aed54b8e07528f0f78db1149c9e1f8674bec90b61a7078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://788d6a46d740de869fe7cd1d28479f8115a3515c7bd2482e06cbc4dba7b27fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\
"containerID\\\":\\\"cri-o://68954082d4369b9b4d95ba1f15e19593dc330b698562be413055b2ba012b5f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eed8335754437c935f07223bf4c4f40f9ab0bbdc9a86b7610f7f6fd55140e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23b3fde15e637081cbf5fad2e55ccea1f0fe63d39ff5f82a60ce05ffeef9a837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f927067aabe4690e99fe50a069afdf68b22d950a8ae141235571ced468c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5fa48d147adc4e46f3bbc60135517a4177c9db0a70c637de5eb499189d3912\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://508370f3dcd6abe4dbf8d3bcb4cf18954c12de8d7b30a7c3e5f1145bd9dbbe3d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T04:51:49Z\\\",\\\"message\\\":\\\"aselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1011 04:51:49.433512 5944 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1011 04:51:49.433523 5944 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1011 04:51:49.433531 5944 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1011 04:51:49.433894 5944 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1011 04:51:49.433960 5944 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 04:51:49.434048 5944 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1011 04:51:49.434066 5944 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1011 04:51:49.434091 5944 factory.go:656] Stopping watch factory\\\\nI1011 04:51:49.434091 5944 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 04:51:49.434124 5944 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 04:51:49.434236 5944 handler.go:208] Removed *v1.Node event handler 2\\\\nI1011 04:51:49.434250 5944 handler.go:208] Removed *v1.Node 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:46Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c5fa48d147adc4e46f3bbc60135517a4177c9db0a70c637de5eb499189d3912\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T04:51:51Z\\\",\\\"message\\\":\\\"dk6g\\\\nI1011 04:51:51.073982 6068 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nF1011 04:51:51.073979 6068 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:51Z is after 2025-08-24T17:21:41Z]\\\\nI1011 04:51:51.073992 6068 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.59 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {dce28c51-c9f1-478b-97c8-7e209d6e7cbe}] Until: Durable:\\\\u003cnil\\\\u003e 
Comment:\\\\u003cnil\\\\u003e\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://add3ed68e0bf59bc569248a3660cfc9a20995641ec65b0bf7133e47c366aad73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev
@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:54Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.430347 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.430392 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.430410 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.430434 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.430451 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:54Z","lastTransitionTime":"2025-10-11T04:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.450125 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a036609457cdb5a74a4ccf65ffa2e502f32994e6e3f1ef8a4fd4b89c5454773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:54Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.469662 4651 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f5830d7-3c6f-48f0-b103-a228e7f8e448\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c307ee086caacd0a5299325126ff0b4b140aa1e18153b8f66696534543bb069\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e30f3d9db8e89696a8a98ab24ca995088afa72c957e19ca59ace228a23268d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qsgwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-11T04:51:54Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.533446 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.533491 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.533504 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.533524 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.533539 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:54Z","lastTransitionTime":"2025-10-11T04:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.636016 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.636081 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.636103 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.636138 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.636183 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:54Z","lastTransitionTime":"2025-10-11T04:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.739480 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.739548 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.739565 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.739589 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.739608 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:54Z","lastTransitionTime":"2025-10-11T04:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.843485 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.843551 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.843579 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.843611 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.843634 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:54Z","lastTransitionTime":"2025-10-11T04:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.869010 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:51:54 crc kubenswrapper[4651]: E1011 04:51:54.869257 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.946373 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.946417 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.946434 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.946458 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:54 crc kubenswrapper[4651]: I1011 04:51:54.946475 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:54Z","lastTransitionTime":"2025-10-11T04:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.049071 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.049135 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.049326 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.049347 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.049363 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:55Z","lastTransitionTime":"2025-10-11T04:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.151959 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.152015 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.152031 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.152055 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.152072 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:55Z","lastTransitionTime":"2025-10-11T04:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.171857 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.180983 4651 scope.go:117] "RemoveContainer" containerID="4c5fa48d147adc4e46f3bbc60135517a4177c9db0a70c637de5eb499189d3912" Oct 11 04:51:55 crc kubenswrapper[4651]: E1011 04:51:55.181428 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-6zt9s_openshift-ovn-kubernetes(28e01c08-a461-4f44-a49c-4bf92fd3a2ce)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.199594 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d425a775a5e278ee77231764e58cac3441e04c383eee54f69ba84488ca640eae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:55Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.218099 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55e4c9c2d46f67bb0f8ccdb2de27dc82f0280476613b3b30d587ff6f5784e787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:55Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.231884 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a551fed8-58fb-48ae-88af-8dc0cb48fc30-metrics-certs\") pod \"network-metrics-daemon-tgvv8\" (UID: \"a551fed8-58fb-48ae-88af-8dc0cb48fc30\") " pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:51:55 crc kubenswrapper[4651]: E1011 04:51:55.232186 4651 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 11 04:51:55 crc kubenswrapper[4651]: E1011 04:51:55.232278 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a551fed8-58fb-48ae-88af-8dc0cb48fc30-metrics-certs podName:a551fed8-58fb-48ae-88af-8dc0cb48fc30 nodeName:}" failed. No retries permitted until 2025-10-11 04:51:57.232252629 +0000 UTC m=+38.128485455 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a551fed8-58fb-48ae-88af-8dc0cb48fc30-metrics-certs") pod "network-metrics-daemon-tgvv8" (UID: "a551fed8-58fb-48ae-88af-8dc0cb48fc30") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.241307 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e5b8526fed77f2e541d6793dc36a902daebce60d367f28f9efb45f7fabb44fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e278c9a5d14f026733737226a6ffc91202b760e48622d5d0031678981fa5f857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-11T04:51:55Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.255022 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.255064 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.255084 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.255111 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.255128 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:55Z","lastTransitionTime":"2025-10-11T04:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.263496 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:55Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.282382 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f12e36a570de649e1df8107ea828cd8e65c18e111de191d3796fc1f3134e43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:55Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.303211 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":
\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:55Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.324914 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:55Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.341292 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"744aa461-2868-440d-a1b9-6d30c0c50b56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aabba06dd47ad528f830a47cc79ddabb3789d24ebed62a6e8f976df1b156ef1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb996f17546fc77ac1b8ed27428aaa6060822daa0156cf016dbb24ed278403a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ee2612930be6596c0186cc000d80039b52e225fd64102002ed9316a0605521\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6263dc47a8284e9849da1fb15c4788174252f3637665ac6e4b19298ca90c587\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:55Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.355608 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7407b7da9820bbcdac4ce9c49c4520834a466b0db187c5f861972713ba1583f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:55Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.357522 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.357579 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.357596 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.357620 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.357639 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:55Z","lastTransitionTime":"2025-10-11T04:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.373215 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:55Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.391579 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-tgvv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a551fed8-58fb-48ae-88af-8dc0cb48fc30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nphhq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nphhq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:53Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-tgvv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:55Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.411070 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f5830d7-3c6f-48f0-b103-a228e7f8e448\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c307ee086caacd0a5299325126ff0b4b140aa1e18153b8f66696534543bb069\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e30f3d9db8e89696a8a98ab24ca995088afa72c957e19ca59ace228a23268d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qsgwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:55Z is after 2025-08-24T17:21:41Z" Oct 11 
04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.431476 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:55Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.448121 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52a0f9e11cf4e8c69d5b356b702ce9df8cbc935daadadf531004a23e5f6dad65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:55Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.459900 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.459939 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.459952 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.459971 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.459985 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:55Z","lastTransitionTime":"2025-10-11T04:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.481204 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fd0aaae2760a1c934aed54b8e07528f0f78db1149c9e1f8674bec90b61a7078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://788d6a46d740de869fe7cd1d28479f8115a3515c7bd2482e06cbc4dba7b27fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68954082d4369b9b4d95ba1f15e19593dc330b698562be413055b2ba012b5f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eed8335754437c935f07223bf4c4f40f9ab0bbdc9a86b7610f7f6fd55140e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23b3fde15e637081cbf5fad2e55ccea1f0fe63d39ff5f82a60ce05ffeef9a837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f927067aabe4690e99fe50a069afdf68b22d950a8ae141235571ced468c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5fa48d147adc4e46f3bbc60135517a4177c9db0a70c637de5eb499189d3912\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c5fa48d147adc4e46f3bbc60135517a4177c9db0a70c637de5eb499189d3912\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T04:51:51Z\\\",\\\"message\\\":\\\"dk6g\\\\nI1011 04:51:51.073982 6068 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nF1011 04:51:51.073979 6068 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:51Z is after 2025-08-24T17:21:41Z]\\\\nI1011 04:51:51.073992 6068 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.59 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {dce28c51-c9f1-478b-97c8-7e209d6e7cbe}] Until: Durable:\\\\u003cnil\\\\u003e 
Comment:\\\\u003cnil\\\\u003e\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-6zt9s_openshift-ovn-kubernetes(28e01c08-a461-4f44-a49c-4bf92fd3a2ce)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://add3ed68e0bf59bc569248a3660cfc9a20995641ec65b0bf7133e47c366aad73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readO
nly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:55Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.504144 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a036609457cdb5a74a4ccf65ffa2e502f32994e6e3f1ef8a4fd4b89c5454773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:55Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.535534 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:51:55 crc kubenswrapper[4651]: E1011 04:51:55.535729 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:52:11.535703491 +0000 UTC m=+52.431936297 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.535799 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.535859 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:51:55 crc kubenswrapper[4651]: E1011 04:51:55.535962 4651 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 11 04:51:55 crc kubenswrapper[4651]: E1011 04:51:55.535992 4651 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 11 04:51:55 crc kubenswrapper[4651]: E1011 04:51:55.536016 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-11 04:52:11.536003518 +0000 UTC m=+52.432236324 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 11 04:51:55 crc kubenswrapper[4651]: E1011 04:51:55.536055 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-11 04:52:11.536036559 +0000 UTC m=+52.432269395 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.562647 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.562709 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.562726 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.562750 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.562768 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:55Z","lastTransitionTime":"2025-10-11T04:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.637439 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.637545 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:51:55 crc kubenswrapper[4651]: E1011 04:51:55.637717 4651 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 11 04:51:55 crc kubenswrapper[4651]: E1011 04:51:55.637769 4651 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 11 04:51:55 crc kubenswrapper[4651]: E1011 04:51:55.637795 4651 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 04:51:55 crc kubenswrapper[4651]: E1011 04:51:55.637876 4651 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 11 04:51:55 crc kubenswrapper[4651]: E1011 04:51:55.637923 4651 
projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 11 04:51:55 crc kubenswrapper[4651]: E1011 04:51:55.637944 4651 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 04:51:55 crc kubenswrapper[4651]: E1011 04:51:55.637947 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-11 04:52:11.637912098 +0000 UTC m=+52.534144974 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 04:51:55 crc kubenswrapper[4651]: E1011 04:51:55.638044 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-11 04:52:11.638017751 +0000 UTC m=+52.534250587 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.665965 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.666019 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.666030 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.666051 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.666063 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:55Z","lastTransitionTime":"2025-10-11T04:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.694707 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.694755 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.694782 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.694800 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.694812 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:55Z","lastTransitionTime":"2025-10-11T04:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:55 crc kubenswrapper[4651]: E1011 04:51:55.713908 4651 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a821a5c3-63e0-43db-82e0-e9c6e98ead52\\\",\\\"systemUUID\\\":\\\"f1c4ea71-0c28-43a7-99a4-e27ff72e186a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:55Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.719312 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.719387 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.719410 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.719438 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.719456 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:55Z","lastTransitionTime":"2025-10-11T04:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:55 crc kubenswrapper[4651]: E1011 04:51:55.737586 4651 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a821a5c3-63e0-43db-82e0-e9c6e98ead52\\\",\\\"systemUUID\\\":\\\"f1c4ea71-0c28-43a7-99a4-e27ff72e186a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:55Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.741723 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.741785 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.741806 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.741859 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.741879 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:55Z","lastTransitionTime":"2025-10-11T04:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:55 crc kubenswrapper[4651]: E1011 04:51:55.761509 4651 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a821a5c3-63e0-43db-82e0-e9c6e98ead52\\\",\\\"systemUUID\\\":\\\"f1c4ea71-0c28-43a7-99a4-e27ff72e186a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:55Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.766206 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.766294 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.766316 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.766346 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.766372 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:55Z","lastTransitionTime":"2025-10-11T04:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:55 crc kubenswrapper[4651]: E1011 04:51:55.787156 4651 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a821a5c3-63e0-43db-82e0-e9c6e98ead52\\\",\\\"systemUUID\\\":\\\"f1c4ea71-0c28-43a7-99a4-e27ff72e186a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:55Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.793768 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.793807 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.793844 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.793862 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.793874 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:55Z","lastTransitionTime":"2025-10-11T04:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:55 crc kubenswrapper[4651]: E1011 04:51:55.816989 4651 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a821a5c3-63e0-43db-82e0-e9c6e98ead52\\\",\\\"systemUUID\\\":\\\"f1c4ea71-0c28-43a7-99a4-e27ff72e186a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:55Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:55 crc kubenswrapper[4651]: E1011 04:51:55.817225 4651 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.819616 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.819751 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.819769 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.819795 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.819812 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:55Z","lastTransitionTime":"2025-10-11T04:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.869520 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.869567 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.869607 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:51:55 crc kubenswrapper[4651]: E1011 04:51:55.869717 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:51:55 crc kubenswrapper[4651]: E1011 04:51:55.869940 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:51:55 crc kubenswrapper[4651]: E1011 04:51:55.870108 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.923430 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.923474 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.923482 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.923496 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:55 crc kubenswrapper[4651]: I1011 04:51:55.923505 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:55Z","lastTransitionTime":"2025-10-11T04:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.031669 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.031720 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.031733 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.031751 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.031763 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:56Z","lastTransitionTime":"2025-10-11T04:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.135409 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.135484 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.135507 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.135535 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.135556 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:56Z","lastTransitionTime":"2025-10-11T04:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.237725 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.237772 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.237788 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.237810 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.237855 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:56Z","lastTransitionTime":"2025-10-11T04:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.340281 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.340336 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.340353 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.340376 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.340393 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:56Z","lastTransitionTime":"2025-10-11T04:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.443408 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.443460 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.443477 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.443500 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.443516 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:56Z","lastTransitionTime":"2025-10-11T04:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.546386 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.546470 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.546492 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.546522 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.546546 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:56Z","lastTransitionTime":"2025-10-11T04:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.650184 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.650253 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.650275 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.650306 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.650333 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:56Z","lastTransitionTime":"2025-10-11T04:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.753202 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.753235 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.753242 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.753255 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.753287 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:56Z","lastTransitionTime":"2025-10-11T04:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.856499 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.856872 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.856912 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.856943 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.856966 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:56Z","lastTransitionTime":"2025-10-11T04:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.868655 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:51:56 crc kubenswrapper[4651]: E1011 04:51:56.868863 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.959740 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.959849 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.959868 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.959893 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:56 crc kubenswrapper[4651]: I1011 04:51:56.959910 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:56Z","lastTransitionTime":"2025-10-11T04:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.063075 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.063145 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.063162 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.063186 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.063203 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:57Z","lastTransitionTime":"2025-10-11T04:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.165467 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.165538 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.165568 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.165600 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.165627 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:57Z","lastTransitionTime":"2025-10-11T04:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.253438 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a551fed8-58fb-48ae-88af-8dc0cb48fc30-metrics-certs\") pod \"network-metrics-daemon-tgvv8\" (UID: \"a551fed8-58fb-48ae-88af-8dc0cb48fc30\") " pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:51:57 crc kubenswrapper[4651]: E1011 04:51:57.253686 4651 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 11 04:51:57 crc kubenswrapper[4651]: E1011 04:51:57.253787 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a551fed8-58fb-48ae-88af-8dc0cb48fc30-metrics-certs podName:a551fed8-58fb-48ae-88af-8dc0cb48fc30 nodeName:}" failed. No retries permitted until 2025-10-11 04:52:01.253763619 +0000 UTC m=+42.149996485 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a551fed8-58fb-48ae-88af-8dc0cb48fc30-metrics-certs") pod "network-metrics-daemon-tgvv8" (UID: "a551fed8-58fb-48ae-88af-8dc0cb48fc30") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.268766 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.268851 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.268871 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.268894 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.268909 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:57Z","lastTransitionTime":"2025-10-11T04:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.372025 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.372114 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.372132 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.372163 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.372192 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:57Z","lastTransitionTime":"2025-10-11T04:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.475088 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.475200 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.475219 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.475244 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.475264 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:57Z","lastTransitionTime":"2025-10-11T04:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.578367 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.578432 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.578449 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.578481 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.578501 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:57Z","lastTransitionTime":"2025-10-11T04:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.681371 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.681452 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.681475 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.681501 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.681518 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:57Z","lastTransitionTime":"2025-10-11T04:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.784247 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.784329 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.784354 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.784384 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.784409 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:57Z","lastTransitionTime":"2025-10-11T04:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.868718 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.868771 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 11 04:51:57 crc kubenswrapper[4651]: E1011 04:51:57.869109 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.869218 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8"
Oct 11 04:51:57 crc kubenswrapper[4651]: E1011 04:51:57.869317 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 11 04:51:57 crc kubenswrapper[4651]: E1011 04:51:57.869470 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30"
Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.887816 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.887918 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.888006 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.888041 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.888063 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:57Z","lastTransitionTime":"2025-10-11T04:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.990883 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.990941 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.990960 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.990992 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:51:57 crc kubenswrapper[4651]: I1011 04:51:57.991018 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:57Z","lastTransitionTime":"2025-10-11T04:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.094495 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.094579 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.094597 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.095134 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.095207 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:58Z","lastTransitionTime":"2025-10-11T04:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.198992 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.199054 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.199076 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.199105 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.199126 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:58Z","lastTransitionTime":"2025-10-11T04:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.301391 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.302023 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.302063 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.302092 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.302116 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:58Z","lastTransitionTime":"2025-10-11T04:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.405770 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.405872 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.405907 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.405936 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.405957 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:58Z","lastTransitionTime":"2025-10-11T04:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.509510 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.509569 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.509585 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.509609 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.509625 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:58Z","lastTransitionTime":"2025-10-11T04:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.612806 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.612866 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.612874 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.612889 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.612898 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:58Z","lastTransitionTime":"2025-10-11T04:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.715725 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.715786 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.715864 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.715889 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.715907 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:58Z","lastTransitionTime":"2025-10-11T04:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.818703 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.818751 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.818764 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.818781 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.818799 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:58Z","lastTransitionTime":"2025-10-11T04:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.868522 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 11 04:51:58 crc kubenswrapper[4651]: E1011 04:51:58.868706 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.921597 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.921671 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.921685 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.921702 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:51:58 crc kubenswrapper[4651]: I1011 04:51:58.921715 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:58Z","lastTransitionTime":"2025-10-11T04:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.050814 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.050913 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.050934 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.050957 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.050976 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:59Z","lastTransitionTime":"2025-10-11T04:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.153966 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.154008 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.154019 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.154035 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.154047 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:59Z","lastTransitionTime":"2025-10-11T04:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.256689 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.256741 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.256757 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.256777 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.256792 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:59Z","lastTransitionTime":"2025-10-11T04:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.359638 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.359675 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.359686 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.359704 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.359715 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:59Z","lastTransitionTime":"2025-10-11T04:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.462697 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.462756 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.462785 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.462803 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.462814 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:59Z","lastTransitionTime":"2025-10-11T04:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.565838 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.565881 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.565893 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.565910 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.565923 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:59Z","lastTransitionTime":"2025-10-11T04:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.669448 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.669517 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.669526 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.669545 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.669553 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:59Z","lastTransitionTime":"2025-10-11T04:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.771425 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.771480 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.771499 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.771522 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.771539 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:59Z","lastTransitionTime":"2025-10-11T04:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.868861 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.869035 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8"
Oct 11 04:51:59 crc kubenswrapper[4651]: E1011 04:51:59.869135 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.869177 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 11 04:51:59 crc kubenswrapper[4651]: E1011 04:51:59.869327 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30"
Oct 11 04:51:59 crc kubenswrapper[4651]: E1011 04:51:59.869437 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.873992 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.874044 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.874061 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.874083 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.874102 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:59Z","lastTransitionTime":"2025-10-11T04:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.889024 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:59Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.901566 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52a0f9e11cf4e8c69d5b356b702ce9df8cbc935daadadf531004a23e5f6dad65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-11T04:51:59Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.934139 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fd0aaae2760a1c934aed54b8e07528f0f78db1149c9e1f8674bec90b61a7078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://788d6a46d740de869fe7cd1d28479f8115a3515c7bd2482e06cbc4dba7b27fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\
"containerID\\\":\\\"cri-o://68954082d4369b9b4d95ba1f15e19593dc330b698562be413055b2ba012b5f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eed8335754437c935f07223bf4c4f40f9ab0bbdc9a86b7610f7f6fd55140e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23b3fde15e637081cbf5fad2e55ccea1f0fe63d39ff5f82a60ce05ffeef9a837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f927067aabe4690e99fe50a069afdf68b22d950a8ae141235571ced468c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5fa48d147adc4e46f3bbc60135517a4177c9db0a70c637de5eb499189d3912\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c5fa48d147adc4e46f3bbc60135517a4177c9db0a70c637de5eb499189d3912\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T04:51:51Z\\\",\\\"message\\\":\\\"dk6g\\\\nI1011 04:51:51.073982 6068 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nF1011 04:51:51.073979 6068 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:51Z is after 2025-08-24T17:21:41Z]\\\\nI1011 04:51:51.073992 6068 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.59 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {dce28c51-c9f1-478b-97c8-7e209d6e7cbe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed 
container=ovnkube-controller pod=ovnkube-node-6zt9s_openshift-ovn-kubernetes(28e01c08-a461-4f44-a49c-4bf92fd3a2ce)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://add3ed68e0bf59bc569248a3660cfc9a20995641ec65b0bf7133e47c366aad73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:59Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.958002 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a036609457cdb5a74a4ccf65ffa2e502f32994e6e3f1ef8a4fd4b89c5454773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"
ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"reason\\\":\\
\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"
,\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:59Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.976201 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.976227 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.976235 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.976249 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.976257 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:51:59Z","lastTransitionTime":"2025-10-11T04:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.979671 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f5830d7-3c6f-48f0-b103-a228e7f8e448\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c307ee086caacd0a5299325126ff0b4b140aa1e18153b8f66696534543bb069\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e30f3d9db8e89696a8a98ab24ca995088afa72c957e19ca59ace228a23268d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qsgwc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:59Z is after 2025-08-24T17:21:41Z" Oct 11 04:51:59 crc kubenswrapper[4651]: I1011 04:51:59.999469 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d425a775a5e278ee77231764e58cac3441e04c383eee54f69ba84488ca640eae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:59Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.018107 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55e4c9c2d46f67bb0f8ccdb2de27dc82f0280476613b3b30d587ff6f5784e787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:00Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.038698 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e5b8526fed77f2e541d6793dc36a902daebce60d367f28f9efb45f7fabb44fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e278c9a5d14f026733737226a6ffc91202b760e48622d5d0031678981fa5f857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:00Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.059777 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:00Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.078693 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.078743 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.078753 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.078769 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.078781 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:00Z","lastTransitionTime":"2025-10-11T04:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.081698 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/cr
cont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:00Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.098512 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"744aa461-2868-440d-a1b9-6d30c0c50b56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aabba06dd47ad528f830a47cc79ddabb3789d24ebed62a6e8f976df1b156ef1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb996f17546fc77ac1b8ed27428aaa6060822daa0156cf016dbb24ed278403a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ee2612930be6596c0186cc000d80039b52e225fd64102002ed9316a0605521\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6263dc47a8284e9849da1fb15c4788174252f3637665ac6e4b19298ca90c587\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:00Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.116129 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:00Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.129457 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f12e36a570de649e1df8107ea828cd8e65c18e111de191d3796fc1f3134e43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:00Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.142620 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7407b7da9820bbcdac4ce9c49c4520834a466b0db187c5f861972713ba1583f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:00Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.159652 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:00Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.176312 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-tgvv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a551fed8-58fb-48ae-88af-8dc0cb48fc30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nphhq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nphhq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:53Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-tgvv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:00Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.180920 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.180983 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.181005 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.181031 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.181049 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:00Z","lastTransitionTime":"2025-10-11T04:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.284947 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.285052 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.285076 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.285105 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.285127 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:00Z","lastTransitionTime":"2025-10-11T04:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.388503 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.388975 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.389000 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.389031 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.389055 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:00Z","lastTransitionTime":"2025-10-11T04:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.491512 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.491772 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.491892 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.491983 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.492061 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:00Z","lastTransitionTime":"2025-10-11T04:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.594239 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.594516 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.594621 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.594707 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.594801 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:00Z","lastTransitionTime":"2025-10-11T04:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.698626 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.698692 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.698719 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.698747 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.698770 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:00Z","lastTransitionTime":"2025-10-11T04:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.801955 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.802003 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.802016 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.802034 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.802047 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:00Z","lastTransitionTime":"2025-10-11T04:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.868743 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:52:00 crc kubenswrapper[4651]: E1011 04:52:00.868917 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.904853 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.904892 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.904905 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.904922 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:00 crc kubenswrapper[4651]: I1011 04:52:00.904937 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:00Z","lastTransitionTime":"2025-10-11T04:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:01 crc kubenswrapper[4651]: I1011 04:52:01.007167 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:01 crc kubenswrapper[4651]: I1011 04:52:01.007213 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:01 crc kubenswrapper[4651]: I1011 04:52:01.007224 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:01 crc kubenswrapper[4651]: I1011 04:52:01.007241 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:01 crc kubenswrapper[4651]: I1011 04:52:01.007253 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:01Z","lastTransitionTime":"2025-10-11T04:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:01 crc kubenswrapper[4651]: I1011 04:52:01.110640 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:01 crc kubenswrapper[4651]: I1011 04:52:01.110710 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:01 crc kubenswrapper[4651]: I1011 04:52:01.110735 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:01 crc kubenswrapper[4651]: I1011 04:52:01.110766 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:01 crc kubenswrapper[4651]: I1011 04:52:01.110789 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:01Z","lastTransitionTime":"2025-10-11T04:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:01 crc kubenswrapper[4651]: I1011 04:52:01.213881 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:01 crc kubenswrapper[4651]: I1011 04:52:01.213971 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:01 crc kubenswrapper[4651]: I1011 04:52:01.213991 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:01 crc kubenswrapper[4651]: I1011 04:52:01.214016 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:01 crc kubenswrapper[4651]: I1011 04:52:01.214035 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:01Z","lastTransitionTime":"2025-10-11T04:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:01 crc kubenswrapper[4651]: I1011 04:52:01.295587 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a551fed8-58fb-48ae-88af-8dc0cb48fc30-metrics-certs\") pod \"network-metrics-daemon-tgvv8\" (UID: \"a551fed8-58fb-48ae-88af-8dc0cb48fc30\") " pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:52:01 crc kubenswrapper[4651]: E1011 04:52:01.295785 4651 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 11 04:52:01 crc kubenswrapper[4651]: E1011 04:52:01.295912 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a551fed8-58fb-48ae-88af-8dc0cb48fc30-metrics-certs podName:a551fed8-58fb-48ae-88af-8dc0cb48fc30 nodeName:}" failed. No retries permitted until 2025-10-11 04:52:09.295887816 +0000 UTC m=+50.192120652 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a551fed8-58fb-48ae-88af-8dc0cb48fc30-metrics-certs") pod "network-metrics-daemon-tgvv8" (UID: "a551fed8-58fb-48ae-88af-8dc0cb48fc30") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 11 04:52:01 crc kubenswrapper[4651]: I1011 04:52:01.317388 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:01 crc kubenswrapper[4651]: I1011 04:52:01.317488 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:01 crc kubenswrapper[4651]: I1011 04:52:01.317507 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:01 crc kubenswrapper[4651]: I1011 04:52:01.317533 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:01 crc kubenswrapper[4651]: I1011 04:52:01.317550 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:01Z","lastTransitionTime":"2025-10-11T04:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:01 crc kubenswrapper[4651]: I1011 04:52:01.419932 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:01 crc kubenswrapper[4651]: I1011 04:52:01.419987 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:01 crc kubenswrapper[4651]: I1011 04:52:01.420008 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:01 crc kubenswrapper[4651]: I1011 04:52:01.420028 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:01 crc kubenswrapper[4651]: I1011 04:52:01.420046 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:01Z","lastTransitionTime":"2025-10-11T04:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 11 04:52:01 crc kubenswrapper[4651]: I1011 04:52:01.869183 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8"
Oct 11 04:52:01 crc kubenswrapper[4651]: I1011 04:52:01.869236 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 11 04:52:01 crc kubenswrapper[4651]: E1011 04:52:01.869333 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30"
Oct 11 04:52:01 crc kubenswrapper[4651]: I1011 04:52:01.869345 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 11 04:52:01 crc kubenswrapper[4651]: E1011 04:52:01.869662 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 11 04:52:01 crc kubenswrapper[4651]: E1011 04:52:01.869749 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 11 04:52:02 crc kubenswrapper[4651]: I1011 04:52:02.869270 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 11 04:52:02 crc kubenswrapper[4651]: E1011 04:52:02.869463 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 11 04:52:03 crc kubenswrapper[4651]: I1011 04:52:03.869100 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 11 04:52:03 crc kubenswrapper[4651]: I1011 04:52:03.869126 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 11 04:52:03 crc kubenswrapper[4651]: I1011 04:52:03.869324 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8"
Oct 11 04:52:03 crc kubenswrapper[4651]: E1011 04:52:03.869354 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 11 04:52:03 crc kubenswrapper[4651]: E1011 04:52:03.869414 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 11 04:52:03 crc kubenswrapper[4651]: E1011 04:52:03.869481 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30"
Oct 11 04:52:04 crc kubenswrapper[4651]: I1011 04:52:04.869431 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 11 04:52:04 crc kubenswrapper[4651]: E1011 04:52:04.869699 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Has your network provider started?"} Oct 11 04:52:05 crc kubenswrapper[4651]: I1011 04:52:05.757746 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:05 crc kubenswrapper[4651]: I1011 04:52:05.757791 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:05 crc kubenswrapper[4651]: I1011 04:52:05.757809 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:05 crc kubenswrapper[4651]: I1011 04:52:05.757838 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:05 crc kubenswrapper[4651]: I1011 04:52:05.757849 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:05Z","lastTransitionTime":"2025-10-11T04:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:05 crc kubenswrapper[4651]: I1011 04:52:05.861868 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:05 crc kubenswrapper[4651]: I1011 04:52:05.862279 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:05 crc kubenswrapper[4651]: I1011 04:52:05.862389 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:05 crc kubenswrapper[4651]: I1011 04:52:05.862558 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:05 crc kubenswrapper[4651]: I1011 04:52:05.862593 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:05Z","lastTransitionTime":"2025-10-11T04:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:05 crc kubenswrapper[4651]: I1011 04:52:05.869269 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:52:05 crc kubenswrapper[4651]: I1011 04:52:05.869350 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:52:05 crc kubenswrapper[4651]: I1011 04:52:05.869267 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:52:05 crc kubenswrapper[4651]: E1011 04:52:05.869441 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:52:05 crc kubenswrapper[4651]: E1011 04:52:05.869643 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:52:05 crc kubenswrapper[4651]: E1011 04:52:05.869792 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:52:05 crc kubenswrapper[4651]: I1011 04:52:05.966397 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:05 crc kubenswrapper[4651]: I1011 04:52:05.966483 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:05 crc kubenswrapper[4651]: I1011 04:52:05.966500 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:05 crc kubenswrapper[4651]: I1011 04:52:05.966525 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:05 crc kubenswrapper[4651]: I1011 04:52:05.966542 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:05Z","lastTransitionTime":"2025-10-11T04:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.069315 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.069384 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.069404 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.069431 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.069450 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:06Z","lastTransitionTime":"2025-10-11T04:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 11 04:52:06 crc kubenswrapper[4651]: E1011 04:52:06.158965 4651 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a821a5c3-63e0-43db-82e0-e9c6e98ead52\\\",\\\"systemUUID\\\":\\\"f1c4ea71-0c28-43a7-99a4-e27ff72e186a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:06Z is after 2025-08-24T17:21:41Z"
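The status patch above dies in the API server's webhook call: the serving certificate for node.network-node-identity.openshift.io at 127.0.0.1:9743 expired on 2025-08-24T17:21:41Z, well before the current clock of 2025-10-11. A small Go probe, sketched below against the address from the log, reproduces the validity check the TLS client performs; InsecureSkipVerify is deliberate so the expired chain can still be read, and this is a diagnostic sketch rather than OpenShift tooling.

// certcheck.go: dial the webhook endpoint from the patch failure and
// compare the served certificate's validity window against the clock.
package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	addr := "127.0.0.1:9743" // webhook address taken from the log
	conn, err := tls.Dial("tcp", addr, &tls.Config{
		InsecureSkipVerify: true, // inspect only; do not trust this connection
	})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()

	certs := conn.ConnectionState().PeerCertificates
	if len(certs) == 0 {
		fmt.Println("server presented no certificate")
		return
	}
	cert := certs[0]
	now := time.Now()
	fmt.Printf("subject:   %s\nnotBefore: %s\nnotAfter:  %s\n",
		cert.Subject, cert.NotBefore.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
	switch {
	case now.After(cert.NotAfter):
		// Matches the x509 error in the log: current time is after NotAfter.
		fmt.Println("certificate EXPIRED", now.Sub(cert.NotAfter), "ago")
	case now.Before(cert.NotBefore):
		fmt.Println("certificate not yet valid")
	default:
		fmt.Println("certificate currently valid")
	}
}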
event="NodeHasNoDiskPressure" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.164840 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.164866 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.164885 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:06Z","lastTransitionTime":"2025-10-11T04:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:06 crc kubenswrapper[4651]: E1011 04:52:06.186572 4651 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a821a5c3-63e0-43db-82e0-e9c6e98ead52\\\",\\\"systemUUID\\\":\\\"f1c4ea71-0c28-43a7-99a4-e27ff72e186a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:06Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.196488 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.196554 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.196571 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.196595 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.196615 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:06Z","lastTransitionTime":"2025-10-11T04:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:06 crc kubenswrapper[4651]: E1011 04:52:06.218547 4651 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a821a5c3-63e0-43db-82e0-e9c6e98ead52\\\",\\\"systemUUID\\\":\\\"f1c4ea71-0c28-43a7-99a4-e27ff72e186a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:06Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.223309 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.223365 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
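The same failed patch, byte-identical down to the image list, is retried in quick succession within one status sync before the kubelet gives up and waits for the next sync period. A rough Go sketch of that bounded-retry shape follows; the count of 5 matches the kubelet's nodeStatusUpdateRetry constant in recent releases, while the failing call below is a stand-in for the real API request, not kubelet code.

// retrysketch.go: shape of the bounded retry visible in the log, where the
// same "will retry" error repeats with only the microsecond timestamp changing.
package main

import (
	"errors"
	"fmt"
)

const nodeStatusUpdateRetry = 5 // attempts per sync loop (kubelet constant)

// patchNodeStatus stands in for the real API call; in this log it always
// fails with the webhook certificate error, so every attempt is consumed.
func patchNodeStatus() error {
	return errors.New("failed calling webhook: x509: certificate has expired or is not yet valid")
}

func updateNodeStatus() error {
	for i := 0; i < nodeStatusUpdateRetry; i++ {
		if err := patchNodeStatus(); err != nil {
			fmt.Printf("Error updating node status, will retry (attempt %d): %v\n", i+1, err)
			continue
		}
		return nil // a single success ends the loop early
	}
	return fmt.Errorf("update node status exceeds retry count %d", nodeStatusUpdateRetry)
}

func main() {
	if err := updateNodeStatus(); err != nil {
		fmt.Println("Unable to update node status:", err)
	}
}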
event="NodeHasNoDiskPressure" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.223383 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.223408 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.223426 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:06Z","lastTransitionTime":"2025-10-11T04:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:06 crc kubenswrapper[4651]: E1011 04:52:06.240788 4651 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a821a5c3-63e0-43db-82e0-e9c6e98ead52\\\",\\\"systemUUID\\\":\\\"f1c4ea71-0c28-43a7-99a4-e27ff72e186a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:06Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.245805 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.245997 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.246081 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.246197 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.246308 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:06Z","lastTransitionTime":"2025-10-11T04:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:06 crc kubenswrapper[4651]: E1011 04:52:06.267192 4651 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a821a5c3-63e0-43db-82e0-e9c6e98ead52\\\",\\\"systemUUID\\\":\\\"f1c4ea71-0c28-43a7-99a4-e27ff72e186a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:06Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:06 crc kubenswrapper[4651]: E1011 04:52:06.267617 4651 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.269998 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.270181 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.270302 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.270434 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.270539 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:06Z","lastTransitionTime":"2025-10-11T04:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.372948 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.373013 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.373035 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.373064 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.373085 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:06Z","lastTransitionTime":"2025-10-11T04:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.475680 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.475762 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.475785 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.475849 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.475876 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:06Z","lastTransitionTime":"2025-10-11T04:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.579614 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.580519 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.580566 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.580601 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.580626 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:06Z","lastTransitionTime":"2025-10-11T04:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.682744 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.682805 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.682893 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.682943 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.682981 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:06Z","lastTransitionTime":"2025-10-11T04:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.785599 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.785635 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.785645 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.785659 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.785669 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:06Z","lastTransitionTime":"2025-10-11T04:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.869193 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:52:06 crc kubenswrapper[4651]: E1011 04:52:06.869383 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.888613 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.888904 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.889192 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.889489 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.889749 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:06Z","lastTransitionTime":"2025-10-11T04:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.992677 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.992731 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.992752 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.992784 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:06 crc kubenswrapper[4651]: I1011 04:52:06.992807 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:06Z","lastTransitionTime":"2025-10-11T04:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.106688 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.106737 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.106746 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.106760 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.106770 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:07Z","lastTransitionTime":"2025-10-11T04:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.209069 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.209127 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.209142 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.209163 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.209180 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:07Z","lastTransitionTime":"2025-10-11T04:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.311708 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.311775 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.311794 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.311848 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.311869 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:07Z","lastTransitionTime":"2025-10-11T04:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.415041 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.415101 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.415119 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.415152 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.415188 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:07Z","lastTransitionTime":"2025-10-11T04:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.518300 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.518354 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.518370 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.518393 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.518410 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:07Z","lastTransitionTime":"2025-10-11T04:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.621918 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.622005 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.622024 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.622050 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.622068 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:07Z","lastTransitionTime":"2025-10-11T04:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.725404 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.725465 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.725489 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.725539 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.725561 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:07Z","lastTransitionTime":"2025-10-11T04:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.828345 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.828416 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.828444 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.828476 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.828498 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:07Z","lastTransitionTime":"2025-10-11T04:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.869306 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.869373 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:52:07 crc kubenswrapper[4651]: E1011 04:52:07.869714 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.869762 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:52:07 crc kubenswrapper[4651]: E1011 04:52:07.869987 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:52:07 crc kubenswrapper[4651]: E1011 04:52:07.870182 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.931638 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.931708 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.931726 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.931751 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:07 crc kubenswrapper[4651]: I1011 04:52:07.931768 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:07Z","lastTransitionTime":"2025-10-11T04:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.036175 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.036266 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.036286 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.036315 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.036343 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:08Z","lastTransitionTime":"2025-10-11T04:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.139723 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.139776 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.139792 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.139849 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.139868 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:08Z","lastTransitionTime":"2025-10-11T04:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.245290 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.245346 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.245355 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.245386 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.245394 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:08Z","lastTransitionTime":"2025-10-11T04:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.348375 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.348952 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.349096 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.349244 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.349388 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:08Z","lastTransitionTime":"2025-10-11T04:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.451769 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.451928 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.452155 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.452259 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.452321 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:08Z","lastTransitionTime":"2025-10-11T04:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.555702 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.555870 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.555950 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.556021 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.556076 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:08Z","lastTransitionTime":"2025-10-11T04:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.658576 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.658620 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.658636 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.658658 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.658674 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:08Z","lastTransitionTime":"2025-10-11T04:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.760612 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.760774 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.760847 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.760907 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.760959 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:08Z","lastTransitionTime":"2025-10-11T04:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.864121 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.864176 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.864198 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.864226 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.864248 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:08Z","lastTransitionTime":"2025-10-11T04:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.869446 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:52:08 crc kubenswrapper[4651]: E1011 04:52:08.869640 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.967376 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.967701 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.967796 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.967908 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:08 crc kubenswrapper[4651]: I1011 04:52:08.968001 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:08Z","lastTransitionTime":"2025-10-11T04:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.070890 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.071184 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.071329 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.071475 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.071603 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:09Z","lastTransitionTime":"2025-10-11T04:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.177024 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.177493 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.177767 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.178230 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.179007 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:09Z","lastTransitionTime":"2025-10-11T04:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.282723 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.282793 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.282849 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.282884 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.282906 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:09Z","lastTransitionTime":"2025-10-11T04:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.387687 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.387765 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.387789 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.387879 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.387914 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:09Z","lastTransitionTime":"2025-10-11T04:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.388802 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a551fed8-58fb-48ae-88af-8dc0cb48fc30-metrics-certs\") pod \"network-metrics-daemon-tgvv8\" (UID: \"a551fed8-58fb-48ae-88af-8dc0cb48fc30\") " pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:52:09 crc kubenswrapper[4651]: E1011 04:52:09.389150 4651 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 11 04:52:09 crc kubenswrapper[4651]: E1011 04:52:09.389311 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a551fed8-58fb-48ae-88af-8dc0cb48fc30-metrics-certs podName:a551fed8-58fb-48ae-88af-8dc0cb48fc30 nodeName:}" failed. No retries permitted until 2025-10-11 04:52:25.389265934 +0000 UTC m=+66.285498940 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a551fed8-58fb-48ae-88af-8dc0cb48fc30-metrics-certs") pod "network-metrics-daemon-tgvv8" (UID: "a551fed8-58fb-48ae-88af-8dc0cb48fc30") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.491910 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.492259 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.492396 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.492583 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.492725 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:09Z","lastTransitionTime":"2025-10-11T04:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.596438 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.596599 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.596620 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.596645 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.596663 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:09Z","lastTransitionTime":"2025-10-11T04:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.699483 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.699553 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.699576 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.699607 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.699630 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:09Z","lastTransitionTime":"2025-10-11T04:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.803288 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.803428 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.803506 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.803536 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.803553 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:09Z","lastTransitionTime":"2025-10-11T04:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.869274 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.869320 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.869915 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:52:09 crc kubenswrapper[4651]: E1011 04:52:09.870125 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:52:09 crc kubenswrapper[4651]: E1011 04:52:09.870367 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:52:09 crc kubenswrapper[4651]: E1011 04:52:09.870537 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.870557 4651 scope.go:117] "RemoveContainer" containerID="4c5fa48d147adc4e46f3bbc60135517a4177c9db0a70c637de5eb499189d3912" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.892430 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d425a775a5e278ee77231764e58cac3441e04c383eee54f69ba84488ca640eae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:09Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.909801 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.909891 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.909914 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.909944 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.909967 4651 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:09Z","lastTransitionTime":"2025-10-11T04:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.914181 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55e4c9c2d46f67bb0f8ccdb2de27dc82f0280476613b3b30d587ff6f5784e787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:09Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.937529 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e5b8526fed77f2e541d6793dc36a902daebce60d367f28f9efb45f7fabb44fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e278c9a5d14f026733737226a6ffc91202b760e48622d5d0031678981fa5f857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:09Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.969502 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:09Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:09 crc kubenswrapper[4651]: I1011 04:52:09.993367 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:09Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.008487 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"744aa461-2868-440d-a1b9-6d30c0c50b56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aabba06dd47ad528f830a47cc79ddabb3789d24ebed62a6e8f976df1b156ef1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb996f17546fc77ac1b8ed27428aaa6060822daa0156cf016dbb24ed278403a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ee2612930be6596c0186cc000d80039b52e225fd64102002ed9316a0605521\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6263dc47a8284e9849da1fb15c4788174252f3637665ac6e4b19298ca90c587\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:10Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.014726 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.014801 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.014858 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.014889 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.014905 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:10Z","lastTransitionTime":"2025-10-11T04:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.023742 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:10Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.039502 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f12e36a570de649e1df8107ea828cd8e65c18e111de191d3796fc1f3134e43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:10Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.052658 4651 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7407b7da9820bbcdac4ce9c49c4520834a466b0db187c5f861972713ba1583f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:10Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.072474 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:10Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.085784 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-tgvv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a551fed8-58fb-48ae-88af-8dc0cb48fc30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nphhq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nphhq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:53Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-tgvv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:10Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.101362 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:10Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.113636 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52a0f9e11cf4e8c69d5b356b702ce9df8cbc935daadadf531004a23e5f6dad65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-11T04:52:10Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.117747 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.117784 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.117796 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.117887 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.117902 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:10Z","lastTransitionTime":"2025-10-11T04:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.138212 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fd0aaae2760a1c934aed54b8e07528f0f78db1149c9e1f8674bec90b61a7078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://788d6a46d740de869fe7cd1d28479f8115a3515c7bd2482e06cbc4dba7b27fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68954082d4369b9b4d95ba1f15e19593dc330b698562be413055b2ba012b5f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eed8335754437c935f07223bf4c4f40f9ab0bbdc9a86b7610f7f6fd55140e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23b3fde15e637081cbf5fad2e55ccea1f0fe63d39ff5f82a60ce05ffeef9a837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f927067aabe4690e99fe50a069afdf68b22d950a8ae141235571ced468c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c5fa48d147adc4e46f3bbc60135517a4177c9db
0a70c637de5eb499189d3912\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c5fa48d147adc4e46f3bbc60135517a4177c9db0a70c637de5eb499189d3912\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T04:51:51Z\\\",\\\"message\\\":\\\"dk6g\\\\nI1011 04:51:51.073982 6068 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nF1011 04:51:51.073979 6068 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:51Z is after 2025-08-24T17:21:41Z]\\\\nI1011 04:51:51.073992 6068 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.59 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {dce28c51-c9f1-478b-97c8-7e209d6e7cbe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6zt9s_openshift-ovn-kubernetes(28e01c08-a461-4f44-a49c-4bf92fd3a2ce)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://add3ed68e0bf59bc569248a3660cfc9a20995641ec65b0bf7133e47c366aad73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:10Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.157700 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a036609457cdb5a74a4ccf65ffa2e502f32994e6e3f1ef8a4fd4b89c5454773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2025-10-11T04:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:10Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.176269 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f5830d7-3c6f-48f0-b103-a228e7f8e448\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c307ee086caacd0a5299325126ff0b4b140aa1e18153b8f66696534543bb069\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e30f3d9db8e89696a8a98ab24ca995088afa72c957e19ca59ace228a23268d9c\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qsgwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:10Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.213392 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6zt9s_28e01c08-a461-4f44-a49c-4bf92fd3a2ce/ovnkube-controller/1.log" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.215728 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" event={"ID":"28e01c08-a461-4f44-a49c-4bf92fd3a2ce","Type":"ContainerStarted","Data":"e0575c5b690a6b170f1e7dbb71dc3d09d4dbb86071a2363d6d7c4c85e2c78320"} Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.216114 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.219868 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.219897 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.219920 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.219930 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.219938 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:10Z","lastTransitionTime":"2025-10-11T04:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.232183 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7407b7da9820bbcdac4ce9c49c4520834a466b0db187c5f861972713ba1583f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:10Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.247883 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:10Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.264216 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-tgvv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a551fed8-58fb-48ae-88af-8dc0cb48fc30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nphhq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nphhq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:53Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-tgvv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:10Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.284123 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:10Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.303225 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52a0f9e11cf4e8c69d5b356b702ce9df8cbc935daadadf531004a23e5f6dad65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-11T04:52:10Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.322551 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.322588 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.322598 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.322612 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.322621 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:10Z","lastTransitionTime":"2025-10-11T04:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.333031 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fd0aaae2760a1c934aed54b8e07528f0f78db1149c9e1f8674bec90b61a7078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://788d6a46d740de869fe7cd1d28479f8115a3515c7bd2482e06cbc4dba7b27fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68954082d4369b9b4d95ba1f15e19593dc330b698562be413055b2ba012b5f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eed8335754437c935f07223bf4c4f40f9ab0bbdc9a86b7610f7f6fd55140e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23b3fde15e637081cbf5fad2e55ccea1f0fe63d39ff5f82a60ce05ffeef9a837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f927067aabe4690e99fe50a069afdf68b22d950a8ae141235571ced468c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0575c5b690a6b170f1e7dbb71dc3d09d4dbb860
71a2363d6d7c4c85e2c78320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c5fa48d147adc4e46f3bbc60135517a4177c9db0a70c637de5eb499189d3912\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T04:51:51Z\\\",\\\"message\\\":\\\"dk6g\\\\nI1011 04:51:51.073982 6068 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nF1011 04:51:51.073979 6068 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:51Z is after 2025-08-24T17:21:41Z]\\\\nI1011 04:51:51.073992 6068 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.59 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {dce28c51-c9f1-478b-97c8-7e209d6e7cbe}] Until: Durable:\\\\u003cnil\\\\u003e 
Comment:\\\\u003cnil\\\\u003e\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:52:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://add3ed68e0bf59bc569248a3660cfc9a20995641ec65b0bf7133e47c366aad73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initCont
ainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:10Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.359840 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a036609457cdb5a74a4ccf65ffa2e502f32994e6e3f1ef8a4fd4b89c5454773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:10Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.374934 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f5830d7-3c6f-48f0-b103-a228e7f8e448\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c307ee086caacd0a5299325126ff0b4b140aa1e18153b8f66696534543bb069\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e30f3d9db8e89696a8a98ab24ca995088afa72c957e19ca59ace228a23268d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qsgwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:10Z is after 2025-08-24T17:21:41Z" Oct 11 
04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.391063 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d425a775a5e278ee77231764e58cac3441e04c383eee54f69ba84488ca640eae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:10Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.408929 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55e4c9c2d46f67bb0f8ccdb2de27dc82f0280476613b3b30d587ff6f5784e787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:10Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.421654 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e5b8526fed77f2e541d6793dc36a902daebce60d367f28f9efb45f7fabb44fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e278c9a5d14f026733737226a6ffc91202b760e48622d5d0031678981fa5f857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:10Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.424039 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.424065 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.424073 4651 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.424085 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.424094 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:10Z","lastTransitionTime":"2025-10-11T04:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.432404 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f12e36a570de649e1df8107ea828cd8e65c18e111de191d3796fc1f3134e43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\
\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:10Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.446955 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",
\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:10Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.459352 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-ap
iserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:10Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.472919 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"744aa461-2868-440d-a1b9-6d30c0c50b56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aabba06dd47ad528f830a47cc79ddabb3789d24ebed62a6e8f976df1b156ef1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb996f17546fc77ac1b8ed27428aaa6060822daa0156cf016dbb24ed278403a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ee2612930be6596c0186cc000d80039b52e225fd64102002ed9316a0605521\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6263dc47a8284e9849da1fb15c4788174252f3637665ac6e4b19298ca90c587\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:10Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.485946 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:10Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.526647 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.526689 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.526701 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.526718 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.526729 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:10Z","lastTransitionTime":"2025-10-11T04:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.629650 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.629704 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.629717 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.629732 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.629744 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:10Z","lastTransitionTime":"2025-10-11T04:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.732235 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.732269 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.732280 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.732296 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.732308 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:10Z","lastTransitionTime":"2025-10-11T04:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.834389 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.834420 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.834431 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.834446 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.834458 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:10Z","lastTransitionTime":"2025-10-11T04:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.868973 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:52:10 crc kubenswrapper[4651]: E1011 04:52:10.869093 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.936352 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.936412 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.936423 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.936437 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:10 crc kubenswrapper[4651]: I1011 04:52:10.936445 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:10Z","lastTransitionTime":"2025-10-11T04:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.040425 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.040468 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.040482 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.040502 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.040518 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:11Z","lastTransitionTime":"2025-10-11T04:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.142779 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.142807 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.142828 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.142841 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.142849 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:11Z","lastTransitionTime":"2025-10-11T04:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.220574 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6zt9s_28e01c08-a461-4f44-a49c-4bf92fd3a2ce/ovnkube-controller/2.log" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.221123 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6zt9s_28e01c08-a461-4f44-a49c-4bf92fd3a2ce/ovnkube-controller/1.log" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.224857 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" event={"ID":"28e01c08-a461-4f44-a49c-4bf92fd3a2ce","Type":"ContainerDied","Data":"e0575c5b690a6b170f1e7dbb71dc3d09d4dbb86071a2363d6d7c4c85e2c78320"} Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.224980 4651 scope.go:117] "RemoveContainer" containerID="4c5fa48d147adc4e46f3bbc60135517a4177c9db0a70c637de5eb499189d3912" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.224811 4651 generic.go:334] "Generic (PLEG): container finished" podID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerID="e0575c5b690a6b170f1e7dbb71dc3d09d4dbb86071a2363d6d7c4c85e2c78320" exitCode=1 Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.225757 4651 scope.go:117] "RemoveContainer" containerID="e0575c5b690a6b170f1e7dbb71dc3d09d4dbb86071a2363d6d7c4c85e2c78320" Oct 11 04:52:11 crc kubenswrapper[4651]: E1011 04:52:11.226013 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-6zt9s_openshift-ovn-kubernetes(28e01c08-a461-4f44-a49c-4bf92fd3a2ce)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.244608 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.244668 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.244691 4651 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.244718 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.244740 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:11Z","lastTransitionTime":"2025-10-11T04:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.246749 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f5830d7-3c6f-48f0-b103-a228e7f8e448\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c307ee086caacd0a5299325126ff0b4b140aa1e18153b8f66696534543bb069\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e30f3d9db8e89696a8a98ab24ca995088afa72c957e19ca59ace228a23268d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-conf
ig\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qsgwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:11Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.260802 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:11Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.270125 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52a0f9e11cf4e8c69d5b356b702ce9df8cbc935daadadf531004a23e5f6dad65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-11T04:52:11Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.298759 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fd0aaae2760a1c934aed54b8e07528f0f78db1149c9e1f8674bec90b61a7078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://788d6a46d740de869fe7cd1d28479f8115a3515c7bd2482e06cbc4dba7b27fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\
"containerID\\\":\\\"cri-o://68954082d4369b9b4d95ba1f15e19593dc330b698562be413055b2ba012b5f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eed8335754437c935f07223bf4c4f40f9ab0bbdc9a86b7610f7f6fd55140e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23b3fde15e637081cbf5fad2e55ccea1f0fe63d39ff5f82a60ce05ffeef9a837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f927067aabe4690e99fe50a069afdf68b22d950a8ae141235571ced468c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0575c5b690a6b170f1e7dbb71dc3d09d4dbb86071a2363d6d7c4c85e2c78320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c5fa48d147adc4e46f3bbc60135517a4177c9db0a70c637de5eb499189d3912\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T04:51:51Z\\\",\\\"message\\\":\\\"dk6g\\\\nI1011 04:51:51.073982 6068 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nF1011 04:51:51.073979 6068 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:51:51Z is after 2025-08-24T17:21:41Z]\\\\nI1011 04:51:51.073992 6068 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.59 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {dce28c51-c9f1-478b-97c8-7e209d6e7cbe}] Until: Durable:\\\\u003cnil\\\\u003e 
Comment:\\\\u003cnil\\\\u003e\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0575c5b690a6b170f1e7dbb71dc3d09d4dbb86071a2363d6d7c4c85e2c78320\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T04:52:10Z\\\",\\\"message\\\":\\\"andler 8\\\\nI1011 04:52:10.796228 6293 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1011 04:52:10.796255 6293 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1011 04:52:10.796280 6293 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1011 04:52:10.796326 6293 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1011 04:52:10.796340 6293 handler.go:208] Removed *v1.Node event handler 2\\\\nI1011 04:52:10.796359 6293 handler.go:208] Removed *v1.Node event handler 7\\\\nI1011 04:52:10.796378 6293 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1011 04:52:10.796447 6293 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1011 04:52:10.796476 6293 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1011 04:52:10.796536 6293 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1011 04:52:10.796553 6293 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1011 04:52:10.796592 6293 factory.go:656] Stopping watch factory\\\\nI1011 04:52:10.796634 6293 ovnkube.go:599] Stopped ovnkube\\\\nI1011 04:52:10.796638 6293 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1011 04:52:10.796674 6293 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1011 04:52:10.796792 6293 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:52:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://add3ed68e0bf59bc569248a3660cfc9a20995641ec65b0bf7133e47c366aad73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd4
7ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:11Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.316305 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a036609457cdb5a74a4ccf65ffa2e502f32994e6e3f1ef8a4fd4b89c5454773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:43Z\\\"}},\\
\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"read
y\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:11Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.347154 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.347192 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.347202 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.347217 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.347227 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:11Z","lastTransitionTime":"2025-10-11T04:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.353400 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d425a775a5e278ee77231764e58cac3441e04c383eee54f69ba84488ca640eae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:11Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.375601 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55e4c9c2d46f67bb0f8ccdb2de27dc82f0280476613b3b30d587ff6f5784e787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:11Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.394174 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e5b8526fed77f2e541d6793dc36a902daebce60d367f28f9efb45f7fabb44fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e278c9a5d14f026733737226a6ffc91202b760e48622d5d0031678981fa5f857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:11Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.405248 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:11Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.415031 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f12e36a570de649e1df8107ea828cd8e65c18e111de191d3796fc1f3134e43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:11Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.426372 4651 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:11Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.437291 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-api
server-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:11Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.446976 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"744aa461-2868-440d-a1b9-6d30c0c50b56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aabba06dd47ad528f830a47cc79ddabb3789d24ebed62a6e8f976df1b156ef1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb996f17546fc77ac1b8ed27428aaa6060822daa0156cf016dbb24ed278403a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ee2612930be6596c0186cc000d80039b52e225fd64102002ed9316a0605521\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6263dc47a8284e9849da1fb15c4788174252f3637665ac6e4b19298ca90c587\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:11Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.449212 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.449245 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.449257 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.449271 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.449280 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:11Z","lastTransitionTime":"2025-10-11T04:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.456304 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7407b7da9820bbcdac4ce9c49c4520834a466b0db187c5f861972713ba1583f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:11Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.467858 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:11Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.477451 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-tgvv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a551fed8-58fb-48ae-88af-8dc0cb48fc30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nphhq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nphhq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:53Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-tgvv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:11Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.551903 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.552196 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.552369 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.552532 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.552667 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:11Z","lastTransitionTime":"2025-10-11T04:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
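Every status patch attempt in the entries above dies on the same webhook: the API server cannot POST to pod.network-node-identity.openshift.io at https://127.0.0.1:9743 because the serving certificate's NotAfter (2025-08-24T17:21:41Z) is long past the node clock (2025-10-11). A minimal Go sketch to confirm the served certificate's validity window from the node itself (assumptions: the 127.0.0.1:9743 address quoted in the log, run on the node; InsecureSkipVerify is set deliberately so the handshake completes even though the certificate is expired):

```go
package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Dial the webhook endpoint named in the log; skip verification so
	// an already-expired certificate can still be inspected.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()

	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Println("subject:  ", cert.Subject)
	fmt.Println("notBefore:", cert.NotBefore)
	fmt.Println("notAfter: ", cert.NotAfter)
	fmt.Println("expired:  ", time.Now().After(cert.NotAfter)) // true, per the x509 error in the log
}
```

The printed notAfter should match the 2025-08-24T17:21:41Z deadline quoted in each failure; on CRC this pattern typically means the VM was resumed long after its certificates were minted, and the errors clear once certificate rotation catches up.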
Has your network provider started?"} Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.617925 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:52:11 crc kubenswrapper[4651]: E1011 04:52:11.618214 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:52:43.618182324 +0000 UTC m=+84.514415160 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.618295 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.618382 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:52:11 crc kubenswrapper[4651]: E1011 04:52:11.618578 4651 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 11 04:52:11 crc kubenswrapper[4651]: E1011 04:52:11.618649 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-11 04:52:43.618633945 +0000 UTC m=+84.514866781 (durationBeforeRetry 32s). 
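The UnmountVolume failure above has a different root cause: TearDownAt cannot proceed because kubevirt.io.hostpath-provisioner is missing from the kubelet's list of registered CSI drivers, so the operation is parked 32 s at a time until the driver re-registers. The kubelet discovers CSI drivers through registration sockets that its plugin watcher finds under plugins_registry; a small sketch to list what is currently registered (assumptions: the default /var/lib/kubelet root and the usual node-driver-registrar socket naming):

```go
package main

import (
	"fmt"
	"os"
	"strings"
)

func main() {
	// Each registered CSI driver leaves a registration socket here.
	entries, err := os.ReadDir("/var/lib/kubelet/plugins_registry")
	if err != nil {
		fmt.Println("cannot read plugins_registry:", err)
		return
	}
	found := false
	for _, e := range entries {
		fmt.Println("registered:", e.Name())
		if strings.Contains(e.Name(), "kubevirt.io.hostpath-provisioner") {
			found = true
		}
	}
	// If this prints false, the TearDownAt error above is expected until
	// the hostpath-provisioner pod comes back and re-registers.
	fmt.Println("hostpath-provisioner registered:", found)
}
```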
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 11 04:52:11 crc kubenswrapper[4651]: E1011 04:52:11.619031 4651 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 11 04:52:11 crc kubenswrapper[4651]: E1011 04:52:11.619242 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-11 04:52:43.61921851 +0000 UTC m=+84.515451346 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.655319 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.655516 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.655579 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.655644 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.655704 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:11Z","lastTransitionTime":"2025-10-11T04:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.720039 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.720085 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:52:11 crc kubenswrapper[4651]: E1011 04:52:11.720226 4651 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 11 04:52:11 crc kubenswrapper[4651]: E1011 04:52:11.720243 4651 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 11 04:52:11 crc kubenswrapper[4651]: E1011 04:52:11.720254 4651 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 04:52:11 crc kubenswrapper[4651]: E1011 04:52:11.720274 4651 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 11 04:52:11 crc kubenswrapper[4651]: E1011 04:52:11.720312 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-11 04:52:43.720299279 +0000 UTC m=+84.616532075 (durationBeforeRetry 32s). 
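Every parked operation in this stretch carries "No retries permitted until ... (durationBeforeRetry 32s)": the volume manager backs off exponentially per operation, and 32 s is consistent with a 500 ms initial delay doubled on each consecutive failure (500 ms x 2^6 = 32 s). A sketch of that policy (assumption: plain doubling with a cap; the initial delay and the cap here are illustrative, not read from kubelet configuration):

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	delay := 500 * time.Millisecond  // illustrative initial delay
	maxDelay := 2 * time.Minute      // illustrative upper bound
	for attempt := 1; attempt <= 8; attempt++ {
		fmt.Printf("attempt %d: next retry in %v\n", attempt, delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay
		}
	}
	// attempt 7 prints 32s, matching the durationBeforeRetry above.
}
```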
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 04:52:11 crc kubenswrapper[4651]: E1011 04:52:11.720323 4651 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 11 04:52:11 crc kubenswrapper[4651]: E1011 04:52:11.720341 4651 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 04:52:11 crc kubenswrapper[4651]: E1011 04:52:11.720399 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-11 04:52:43.720379191 +0000 UTC m=+84.616612067 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.762358 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.762406 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.762423 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.762443 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.762456 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:11Z","lastTransitionTime":"2025-10-11T04:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.865730 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.865805 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.865874 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.865913 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.865939 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:11Z","lastTransitionTime":"2025-10-11T04:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.868712 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.868711 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:52:11 crc kubenswrapper[4651]: E1011 04:52:11.870424 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.870589 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:52:11 crc kubenswrapper[4651]: E1011 04:52:11.871165 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:52:11 crc kubenswrapper[4651]: E1011 04:52:11.871404 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
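All the NodeNotReady churn in this window reduces to one cause line: the runtime reports NetworkReady=false because /etc/kubernetes/cni/net.d/ contains no CNI configuration, which in turn blocks sandbox creation for the pods above ("No sandbox for pod can be found") and will keep doing so until ovnkube-controller escapes the back-off recorded just below. A sketch of the same readiness check (assumptions: the directory named in the log and the .conf/.conflist/.json extensions libcni accepts):

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	dir := "/etc/kubernetes/cni/net.d"
	var confs []string
	// libcni considers these extensions when loading network configs.
	for _, pat := range []string{"*.conf", "*.conflist", "*.json"} {
		matches, _ := filepath.Glob(filepath.Join(dir, pat))
		confs = append(confs, matches...)
	}
	if len(confs) == 0 {
		fmt.Println("no CNI configuration file in", dir, "- NetworkReady=false")
		os.Exit(1)
	}
	fmt.Println("CNI configs present:", confs)
}
```

Once the network plugin writes its config into that directory, the NetworkReady condition flips and the queued sandboxes start.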
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.968499 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.968562 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.968581 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.968607 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:11 crc kubenswrapper[4651]: I1011 04:52:11.968624 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:11Z","lastTransitionTime":"2025-10-11T04:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.072357 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.072430 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.072445 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.072465 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.072480 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:12Z","lastTransitionTime":"2025-10-11T04:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.175609 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.176367 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.177036 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.177086 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.177105 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:12Z","lastTransitionTime":"2025-10-11T04:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.232153 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6zt9s_28e01c08-a461-4f44-a49c-4bf92fd3a2ce/ovnkube-controller/2.log"
Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.239802 4651 scope.go:117] "RemoveContainer" containerID="e0575c5b690a6b170f1e7dbb71dc3d09d4dbb86071a2363d6d7c4c85e2c78320"
Oct 11 04:52:12 crc kubenswrapper[4651]: E1011 04:52:12.240302 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-6zt9s_openshift-ovn-kubernetes(28e01c08-a461-4f44-a49c-4bf92fd3a2ce)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce"
Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.261327 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d425a775a5e278ee77231764e58cac3441e04c383eee54f69ba84488ca640eae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:12Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.279972 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.280220 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.280351 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.280503 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.280625 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:12Z","lastTransitionTime":"2025-10-11T04:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.280641 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55e4c9c2d46f67bb0f8ccdb2de27dc82f0280476613b3b30d587ff6f5784e787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:12Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.302390 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e5b8526fed77f2e541d6793dc36a902daebce60d367f28f9efb45f7fabb44fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e278c9a5d14f026733737226a6ffc91202b760e48622d5d0031678981fa5f857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:12Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.324568 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:12Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.345914 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f12e36a570de649e1df8107ea828cd8e65c18e111de191d3796fc1f3134e43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:12Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.369256 4651 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:12Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.383959 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.384029 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.384049 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.384134 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.384155 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:12Z","lastTransitionTime":"2025-10-11T04:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.388222 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e
11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:12Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.408487 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"744aa461-2868-440d-a1b9-6d30c0c50b56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aabba06dd47ad528f830a47cc79ddabb3789d24ebed62a6e8f976df1b156ef1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb996f17546fc77ac1b8ed27428aaa6060822daa0156cf016dbb24ed278403a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ee2612930be6596c0186cc000d80039b52e225fd64102002ed9316a0605521\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6263dc47a8284e9849da1fb15c4788174252f3637665ac6e4b19298ca90c587\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:12Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.428873 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-tgvv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a551fed8-58fb-48ae-88af-8dc0cb48fc30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nphhq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nphhq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:53Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-tgvv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:12Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.447427 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7407b7da9820bbcdac4ce9c49c4520834a466b0db187c5f861972713ba1583f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:12Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.469575 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:12Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.487092 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.487170 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.487192 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.487222 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.487244 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:12Z","lastTransitionTime":"2025-10-11T04:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.495121 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a036609457cdb5a74a4ccf65ffa2e502f32994e6e3f1ef8a4fd4b89c5454773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:12Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.511906 4651 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f5830d7-3c6f-48f0-b103-a228e7f8e448\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c307ee086caacd0a5299325126ff0b4b140aa1e18153b8f66696534543bb069\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e30f3d9db8e89696a8a98ab24ca995088afa72c957e19ca59ace228a23268d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qsgwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-11T04:52:12Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.527402 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:12Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.544255 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52a0f9e11cf4e8c69d5b356b702ce9df8cbc935daadadf531004a23e5f6dad65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:12Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.572048 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fd0aaae2760a1c934aed54b8e07528f0f78db1149c9e1f8674bec90b61a7078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://788d6a46d740de869fe7cd1d28479f8115a3515c7bd2482e06cbc4dba7b27fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68954082d4369b9b4d95ba1f15e19593dc330b698562be413055b2ba012b5f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eed8335754437c935f07223bf4c4f40f9ab0bbdc9a86b7610f7f6fd55140e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23b3fde15e637081cbf5fad2e55ccea1f0fe63d39ff5f82a60ce05ffeef9a837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f927067aabe4690e99fe50a069afdf68b22d950a8ae141235571ced468c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0575c5b690a6b170f1e7dbb71dc3d09d4dbb86071a2363d6d7c4c85e2c78320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0575c5b690a6b170f1e7dbb71dc3d09d4dbb86071a2363d6d7c4c85e2c78320\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T04:52:10Z\\\",\\\"message\\\":\\\"andler 8\\\\nI1011 04:52:10.796228 6293 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1011 04:52:10.796255 6293 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1011 04:52:10.796280 6293 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1011 04:52:10.796326 6293 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1011 04:52:10.796340 6293 handler.go:208] Removed *v1.Node event handler 2\\\\nI1011 04:52:10.796359 6293 handler.go:208] Removed *v1.Node event handler 7\\\\nI1011 04:52:10.796378 6293 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1011 04:52:10.796447 6293 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1011 04:52:10.796476 6293 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1011 04:52:10.796536 6293 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1011 04:52:10.796553 6293 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1011 04:52:10.796592 6293 factory.go:656] Stopping watch factory\\\\nI1011 04:52:10.796634 6293 ovnkube.go:599] Stopped ovnkube\\\\nI1011 04:52:10.796638 6293 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1011 04:52:10.796674 6293 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1011 04:52:10.796792 6293 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:52:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6zt9s_openshift-ovn-kubernetes(28e01c08-a461-4f44-a49c-4bf92fd3a2ce)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://add3ed68e0bf59bc569248a3660cfc9a20995641ec65b0bf7133e47c366aad73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:12Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.590185 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.590441 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.590540 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.590633 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.590717 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:12Z","lastTransitionTime":"2025-10-11T04:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.694159 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.694234 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.694255 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.694284 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.694302 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:12Z","lastTransitionTime":"2025-10-11T04:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.798000 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.798046 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.798061 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.798081 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.798093 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:12Z","lastTransitionTime":"2025-10-11T04:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.868624 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:52:12 crc kubenswrapper[4651]: E1011 04:52:12.868856 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.901526 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.901587 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.901604 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.901629 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:12 crc kubenswrapper[4651]: I1011 04:52:12.901645 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:12Z","lastTransitionTime":"2025-10-11T04:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.004278 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.004356 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.004374 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.004398 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.004416 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:13Z","lastTransitionTime":"2025-10-11T04:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.108358 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.108685 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.108889 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.109047 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.109169 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:13Z","lastTransitionTime":"2025-10-11T04:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.217075 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.217245 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.217273 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.217327 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.217350 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:13Z","lastTransitionTime":"2025-10-11T04:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.320372 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.320438 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.320460 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.320490 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.320513 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:13Z","lastTransitionTime":"2025-10-11T04:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.423719 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.423789 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.423807 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.423864 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.423881 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:13Z","lastTransitionTime":"2025-10-11T04:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.526306 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.526383 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.526401 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.526424 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.526440 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:13Z","lastTransitionTime":"2025-10-11T04:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.629704 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.629765 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.629787 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.629814 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.629863 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:13Z","lastTransitionTime":"2025-10-11T04:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.733043 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.733084 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.733092 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.733108 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.733119 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:13Z","lastTransitionTime":"2025-10-11T04:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.835615 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.835672 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.835688 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.835713 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.835731 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:13Z","lastTransitionTime":"2025-10-11T04:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.868925 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.869007 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:52:13 crc kubenswrapper[4651]: E1011 04:52:13.869084 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.869120 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:52:13 crc kubenswrapper[4651]: E1011 04:52:13.869261 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:52:13 crc kubenswrapper[4651]: E1011 04:52:13.869523 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.938854 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.938921 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.938943 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.938973 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:13 crc kubenswrapper[4651]: I1011 04:52:13.938996 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:13Z","lastTransitionTime":"2025-10-11T04:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.042045 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.042112 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.042135 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.042162 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.042180 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:14Z","lastTransitionTime":"2025-10-11T04:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.144528 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.144592 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.144609 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.144635 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.144654 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:14Z","lastTransitionTime":"2025-10-11T04:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.246969 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.247045 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.247069 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.247098 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.247119 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:14Z","lastTransitionTime":"2025-10-11T04:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.349731 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.349982 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.350053 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.350128 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.350201 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:14Z","lastTransitionTime":"2025-10-11T04:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.453241 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.453309 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.453326 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.453349 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.453365 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:14Z","lastTransitionTime":"2025-10-11T04:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.556372 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.556431 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.556449 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.556471 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.556487 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:14Z","lastTransitionTime":"2025-10-11T04:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.659061 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.659120 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.659135 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.659159 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.659176 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:14Z","lastTransitionTime":"2025-10-11T04:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.762294 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.762362 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.762382 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.762406 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.762425 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:14Z","lastTransitionTime":"2025-10-11T04:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.866003 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.866859 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.867024 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.867182 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.867395 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:14Z","lastTransitionTime":"2025-10-11T04:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.869284 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:52:14 crc kubenswrapper[4651]: E1011 04:52:14.869463 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.971634 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.971683 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.971699 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.971723 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:14 crc kubenswrapper[4651]: I1011 04:52:14.971740 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:14Z","lastTransitionTime":"2025-10-11T04:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.073732 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.073787 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.073802 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.073846 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.073860 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:15Z","lastTransitionTime":"2025-10-11T04:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.177580 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.177753 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.177785 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.177809 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.177856 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:15Z","lastTransitionTime":"2025-10-11T04:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.280995 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.281314 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.281881 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.282090 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.282307 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:15Z","lastTransitionTime":"2025-10-11T04:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.385355 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.385417 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.385436 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.385460 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.385477 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:15Z","lastTransitionTime":"2025-10-11T04:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.488699 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.488767 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.488779 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.488798 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.488811 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:15Z","lastTransitionTime":"2025-10-11T04:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.590720 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.590785 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.590796 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.590814 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.590898 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:15Z","lastTransitionTime":"2025-10-11T04:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.693921 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.694312 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.694494 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.694651 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.694789 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:15Z","lastTransitionTime":"2025-10-11T04:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.797123 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.797161 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.797169 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.797183 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.797192 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:15Z","lastTransitionTime":"2025-10-11T04:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.869050 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.869118 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:52:15 crc kubenswrapper[4651]: E1011 04:52:15.869159 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.869062 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:52:15 crc kubenswrapper[4651]: E1011 04:52:15.869214 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:52:15 crc kubenswrapper[4651]: E1011 04:52:15.869470 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.899776 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.899839 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.899850 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.899865 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:15 crc kubenswrapper[4651]: I1011 04:52:15.899874 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:15Z","lastTransitionTime":"2025-10-11T04:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.002450 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.002493 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.002504 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.002517 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.002529 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:16Z","lastTransitionTime":"2025-10-11T04:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.106277 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.106352 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.106377 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.106407 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.106430 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:16Z","lastTransitionTime":"2025-10-11T04:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.209577 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.209653 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.209670 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.209700 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.209716 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:16Z","lastTransitionTime":"2025-10-11T04:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.312571 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.312678 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.312703 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.312734 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.312761 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:16Z","lastTransitionTime":"2025-10-11T04:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.396028 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.396280 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.396371 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.396464 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.396559 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:16Z","lastTransitionTime":"2025-10-11T04:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:16 crc kubenswrapper[4651]: E1011 04:52:16.417637 4651 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a821a5c3-63e0-43db-82e0-e9c6e98ead52\\\",\\\"systemUUID\\\":\\\"f1c4ea71-0c28-43a7-99a4-e27ff72e186a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:16Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.421683 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.421755 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.421779 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.421804 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.421850 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:16Z","lastTransitionTime":"2025-10-11T04:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:16 crc kubenswrapper[4651]: E1011 04:52:16.435149 4651 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a821a5c3-63e0-43db-82e0-e9c6e98ead52\\\",\\\"systemUUID\\\":\\\"f1c4ea71-0c28-43a7-99a4-e27ff72e186a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:16Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.439316 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.439468 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.439584 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.439733 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.439843 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:16Z","lastTransitionTime":"2025-10-11T04:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:16 crc kubenswrapper[4651]: E1011 04:52:16.460125 4651 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a821a5c3-63e0-43db-82e0-e9c6e98ead52\\\",\\\"systemUUID\\\":\\\"f1c4ea71-0c28-43a7-99a4-e27ff72e186a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:16Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.464724 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.464783 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.464801 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.464858 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.464881 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:16Z","lastTransitionTime":"2025-10-11T04:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:16 crc kubenswrapper[4651]: E1011 04:52:16.481729 4651 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a821a5c3-63e0-43db-82e0-e9c6e98ead52\\\",\\\"systemUUID\\\":\\\"f1c4ea71-0c28-43a7-99a4-e27ff72e186a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:16Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.485611 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.485749 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.485869 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.486064 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.486164 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:16Z","lastTransitionTime":"2025-10-11T04:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:16 crc kubenswrapper[4651]: E1011 04:52:16.506553 4651 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a821a5c3-63e0-43db-82e0-e9c6e98ead52\\\",\\\"systemUUID\\\":\\\"f1c4ea71-0c28-43a7-99a4-e27ff72e186a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:16Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:16 crc kubenswrapper[4651]: E1011 04:52:16.506713 4651 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.508530 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.508566 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.508579 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.508595 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.508608 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:16Z","lastTransitionTime":"2025-10-11T04:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.611111 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.611137 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.611145 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.611221 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.611232 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:16Z","lastTransitionTime":"2025-10-11T04:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.632183 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.643578 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.649535 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:16Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.663947 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-tgvv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a551fed8-58fb-48ae-88af-8dc0cb48fc30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nphhq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nphhq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:53Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-tgvv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:16Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.679261 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7407b7da9820bbcdac4ce9c49c4520834a466b0db187c5f861972713ba1583f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:16Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.709529 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fd0aaae2760a1c934aed54b8e07528f0f78db1149c9e1f8674bec90b61a7078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://788d6a46d740de869fe7cd1d28479f8115a3515c7bd2482e06cbc4dba7b27fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68954082d4369b9b4d95ba1f15e19593dc330b698562be413055b2ba012b5f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eed8335754437c935f07223bf4c4f40f9ab0bbdc9a86b7610f7f6fd55140e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23b3fde15e637081cbf5fad2e55ccea1f0fe63d39ff5f82a60ce05ffeef9a837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f927067aabe4690e99fe50a069afdf68b22d950a8ae141235571ced468c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0575c5b690a6b170f1e7dbb71dc3d09d4dbb86071a2363d6d7c4c85e2c78320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0575c5b690a6b170f1e7dbb71dc3d09d4dbb86071a2363d6d7c4c85e2c78320\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T04:52:10Z\\\",\\\"message\\\":\\\"andler 8\\\\nI1011 04:52:10.796228 6293 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1011 04:52:10.796255 6293 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1011 04:52:10.796280 6293 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1011 04:52:10.796326 6293 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1011 04:52:10.796340 6293 handler.go:208] Removed *v1.Node event handler 2\\\\nI1011 04:52:10.796359 6293 handler.go:208] Removed *v1.Node event handler 7\\\\nI1011 04:52:10.796378 6293 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1011 04:52:10.796447 6293 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1011 04:52:10.796476 6293 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1011 04:52:10.796536 6293 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1011 04:52:10.796553 6293 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1011 04:52:10.796592 6293 factory.go:656] Stopping watch factory\\\\nI1011 04:52:10.796634 6293 ovnkube.go:599] Stopped ovnkube\\\\nI1011 04:52:10.796638 6293 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1011 04:52:10.796674 6293 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1011 04:52:10.796792 6293 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:52:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6zt9s_openshift-ovn-kubernetes(28e01c08-a461-4f44-a49c-4bf92fd3a2ce)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://add3ed68e0bf59bc569248a3660cfc9a20995641ec65b0bf7133e47c366aad73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:16Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.713469 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.713524 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.713541 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.713563 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.713580 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:16Z","lastTransitionTime":"2025-10-11T04:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.728624 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a036609457cdb5a74a4ccf65ffa2e502f32994e6e3f1ef8a4fd4b89c5454773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:16Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.744041 4651 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f5830d7-3c6f-48f0-b103-a228e7f8e448\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c307ee086caacd0a5299325126ff0b4b140aa1e18153b8f66696534543bb069\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e30f3d9db8e89696a8a98ab24ca995088afa72c957e19ca59ace228a23268d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qsgwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-11T04:52:16Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.761612 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:16Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.775180 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52a0f9e11cf4e8c69d5b356b702ce9df8cbc935daadadf531004a23e5f6dad65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:16Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.793485 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e5b8526fed77f2e541d6793dc36a902daebce60d367f28f9efb45f7fabb44fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e278c9a5d14f026733737226a6ffc91202b760e48622d5d0031678981fa5f857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:16Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.812766 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d425a775a5e278ee77231764e58cac3441e04c383eee54f69ba84488ca640eae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:16Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.817018 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.817070 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.817088 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.817112 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.817130 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:16Z","lastTransitionTime":"2025-10-11T04:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.830695 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55e4c9c2d46f67bb0f8ccdb2de27dc82f0280476613b3b30d587ff6f5784e787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:16Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.845102 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:16Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.858174 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f12e36a570de649e1df8107ea828cd8e65c18e111de191d3796fc1f3134e43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:16Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.869302 4651 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:52:16 crc kubenswrapper[4651]: E1011 04:52:16.869502 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.878041 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true
,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:16Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.894527 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started
\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:16Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.910328 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"744aa461-2868-440d-a1b9-6d30c0c50b56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aabba06dd47ad528f830a47cc79ddabb3789d24ebed62a6e8f976df1b156ef1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb996f17546fc77ac1b8ed27428aaa6060822daa0156cf016dbb24ed278403a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ee2612930be6596c0186cc000d80039b52e225fd64102002ed9316a0605521\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6263dc47a8284e9849da1fb15c4788174252f3637665ac6e4b19298ca90c587\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:16Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.920295 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.920356 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.920371 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.920393 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:16 crc kubenswrapper[4651]: I1011 04:52:16.920409 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:16Z","lastTransitionTime":"2025-10-11T04:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.023771 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.024133 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.024276 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.024479 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.024627 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:17Z","lastTransitionTime":"2025-10-11T04:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.129772 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.129946 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.129975 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.130497 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.130531 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:17Z","lastTransitionTime":"2025-10-11T04:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.233379 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.233429 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.233446 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.233471 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.233486 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:17Z","lastTransitionTime":"2025-10-11T04:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.336562 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.336604 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.336618 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.336638 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.336652 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:17Z","lastTransitionTime":"2025-10-11T04:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.439227 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.439263 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.439275 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.439292 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.439303 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:17Z","lastTransitionTime":"2025-10-11T04:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.542050 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.542124 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.542147 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.542179 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.542200 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:17Z","lastTransitionTime":"2025-10-11T04:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.645325 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.645409 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.645433 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.645464 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.645486 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:17Z","lastTransitionTime":"2025-10-11T04:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.748470 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.748570 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.748595 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.748630 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.748654 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:17Z","lastTransitionTime":"2025-10-11T04:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.851810 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.851925 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.851952 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.851983 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.852007 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:17Z","lastTransitionTime":"2025-10-11T04:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.869513 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.869599 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.869513 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:52:17 crc kubenswrapper[4651]: E1011 04:52:17.869704 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:52:17 crc kubenswrapper[4651]: E1011 04:52:17.869894 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:52:17 crc kubenswrapper[4651]: E1011 04:52:17.870109 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.955112 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.955181 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.955203 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.955232 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:17 crc kubenswrapper[4651]: I1011 04:52:17.955255 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:17Z","lastTransitionTime":"2025-10-11T04:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.058429 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.058480 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.058497 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.058520 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.058538 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:18Z","lastTransitionTime":"2025-10-11T04:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.161090 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.161140 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.161157 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.161182 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.161200 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:18Z","lastTransitionTime":"2025-10-11T04:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.263876 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.263924 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.263939 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.263958 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.263971 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:18Z","lastTransitionTime":"2025-10-11T04:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.365863 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.365902 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.365913 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.365930 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.365942 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:18Z","lastTransitionTime":"2025-10-11T04:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.468099 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.468135 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.468144 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.468158 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.468169 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:18Z","lastTransitionTime":"2025-10-11T04:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.570549 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.570597 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.570615 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.570651 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.570671 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:18Z","lastTransitionTime":"2025-10-11T04:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.673570 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.673632 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.673645 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.673707 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.673729 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:18Z","lastTransitionTime":"2025-10-11T04:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.778195 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.778230 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.778242 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.778259 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.778272 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:18Z","lastTransitionTime":"2025-10-11T04:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.868564 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:52:18 crc kubenswrapper[4651]: E1011 04:52:18.868894 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.881734 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.882252 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.882347 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.882452 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.882539 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:18Z","lastTransitionTime":"2025-10-11T04:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.986700 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.986777 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.986799 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.986876 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:18 crc kubenswrapper[4651]: I1011 04:52:18.986901 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:18Z","lastTransitionTime":"2025-10-11T04:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.090521 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.090574 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.090588 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.090675 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.090694 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:19Z","lastTransitionTime":"2025-10-11T04:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.194067 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.194131 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.194143 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.194165 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.194180 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:19Z","lastTransitionTime":"2025-10-11T04:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.296740 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.296776 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.296785 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.296844 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.296858 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:19Z","lastTransitionTime":"2025-10-11T04:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.400040 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.400090 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.400105 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.400127 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.400140 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:19Z","lastTransitionTime":"2025-10-11T04:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.503504 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.503587 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.503622 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.503658 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.503685 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:19Z","lastTransitionTime":"2025-10-11T04:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.606449 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.606494 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.606506 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.606524 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.606539 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:19Z","lastTransitionTime":"2025-10-11T04:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.708661 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.709060 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.709222 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.709356 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.709486 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:19Z","lastTransitionTime":"2025-10-11T04:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.811546 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.811943 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.812166 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.812362 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.812747 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:19Z","lastTransitionTime":"2025-10-11T04:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.869364 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:52:19 crc kubenswrapper[4651]: E1011 04:52:19.869734 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.869979 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.870216 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:52:19 crc kubenswrapper[4651]: E1011 04:52:19.870371 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:52:19 crc kubenswrapper[4651]: E1011 04:52:19.870722 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.887600 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d425a775a5e278ee77231764e58cac3441e04c383eee54f69ba84488ca640eae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:19Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.904099 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55e4c9c2d46f67bb0f8ccdb2de27dc82f0280476613b3b30d587ff6f5784e787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:19Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.916530 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.916575 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.916587 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.916607 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.916619 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:19Z","lastTransitionTime":"2025-10-11T04:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.920859 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e5b8526fed77f2e541d6793dc36a902daebce60d367f28f9efb45f7fabb44fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e278c9a5d14f026733737226a6ffc91202b760e48622d5d0031678981fa5f857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:19Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.932669 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:19Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.944391 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:19Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.959137 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"744aa461-2868-440d-a1b9-6d30c0c50b56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aabba06dd47ad528f830a47cc79ddabb3789d24ebed62a6e8f976df1b156ef1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb996f17546fc77ac1b8ed27428aaa6060822daa0156cf016dbb24ed278403a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ee2612930be6596c0186cc000d80039b52e225fd64102002ed9316a0605521\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6263dc47a8284e9849da1fb15c4788174252f3637665ac6e4b19298ca90c587\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:19Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.970454 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dacbe38-be95-4b56-a204-f87d2e8d6496\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918c77155c7ad4e14a9706e6e36a26cf2c774133b3435468d326b1b8c1f29f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf6183b5381ec04f62d32175471097bd2d2088003063202375b88ccfb9080fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2aaeb6d47f2435f2865c7516d976fecaf6de20b458b5cdcea1cdf59449cdef9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96c869e21e725921f47799368a3327f628bbdd7d7db8b4d0f29bf27b4d04551b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96c869e21e725921f47799368a3327f628bbdd7d7db8b4d0f29bf27b4d04551b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:19Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.983854 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:19Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:19 crc kubenswrapper[4651]: I1011 04:52:19.994888 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f12e36a570de649e1df8107ea828cd8e65c18e111de191d3796fc1f3134e43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:19Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.006234 4651 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7407b7da9820bbcdac4ce9c49c4520834a466b0db187c5f861972713ba1583f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:20Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.017607 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:20Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.018417 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.018445 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.018453 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.018467 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.018476 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:20Z","lastTransitionTime":"2025-10-11T04:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.027643 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-tgvv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a551fed8-58fb-48ae-88af-8dc0cb48fc30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nphhq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nphhq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:53Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-tgvv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:20Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.038624 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:20Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.047274 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52a0f9e11cf4e8c69d5b356b702ce9df8cbc935daadadf531004a23e5f6dad65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:20Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.062260 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fd0aaae2760a1c934aed54b8e07528f0f78db1149c9e1f8674bec90b61a7078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://788d6a46d740de869fe7cd1d28479f8115a3515c7bd2482e06cbc4dba7b27fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68954082d4369b9b4d95ba1f15e19593dc330b698562be413055b2ba012b5f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eed8335754437c935f07223bf4c4f40f9ab0bbdc9a86b7610f7f6fd55140e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23b3fde15e637081cbf5fad2e55ccea1f0fe63d39ff5f82a60ce05ffeef9a837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f927067aabe4690e99fe50a069afdf68b22d950a8ae141235571ced468c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0575c5b690a6b170f1e7dbb71dc3d09d4dbb86071a2363d6d7c4c85e2c78320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0575c5b690a6b170f1e7dbb71dc3d09d4dbb86071a2363d6d7c4c85e2c78320\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T04:52:10Z\\\",\\\"message\\\":\\\"andler 8\\\\nI1011 04:52:10.796228 6293 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1011 04:52:10.796255 6293 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1011 04:52:10.796280 6293 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1011 04:52:10.796326 6293 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1011 04:52:10.796340 6293 handler.go:208] Removed *v1.Node event handler 2\\\\nI1011 04:52:10.796359 6293 handler.go:208] Removed *v1.Node event handler 7\\\\nI1011 04:52:10.796378 6293 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1011 04:52:10.796447 6293 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1011 04:52:10.796476 6293 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1011 04:52:10.796536 6293 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1011 04:52:10.796553 6293 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1011 04:52:10.796592 6293 factory.go:656] Stopping watch factory\\\\nI1011 04:52:10.796634 6293 ovnkube.go:599] Stopped ovnkube\\\\nI1011 04:52:10.796638 6293 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1011 04:52:10.796674 6293 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1011 04:52:10.796792 6293 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:52:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6zt9s_openshift-ovn-kubernetes(28e01c08-a461-4f44-a49c-4bf92fd3a2ce)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://add3ed68e0bf59bc569248a3660cfc9a20995641ec65b0bf7133e47c366aad73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:20Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.074940 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a036609457cdb5a74a4ccf65ffa2e502f32994e6e3f1ef8a4fd4b89c5454773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2025-10-11T04:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:20Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.084502 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f5830d7-3c6f-48f0-b103-a228e7f8e448\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c307ee086caacd0a5299325126ff0b4b140aa1e18153b8f66696534543bb069\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e30f3d9db8e89696a8a98ab24ca995088afa72c957e19ca59ace228a23268d9c\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qsgwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:20Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.121192 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.121232 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.121241 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.121255 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.121264 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:20Z","lastTransitionTime":"2025-10-11T04:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.223611 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.223677 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.223695 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.223720 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.223738 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:20Z","lastTransitionTime":"2025-10-11T04:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.325542 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.325793 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.325802 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.325833 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.325845 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:20Z","lastTransitionTime":"2025-10-11T04:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.436691 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.436737 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.436749 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.436765 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.436775 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:20Z","lastTransitionTime":"2025-10-11T04:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.539191 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.539227 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.539235 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.539251 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.539260 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:20Z","lastTransitionTime":"2025-10-11T04:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.641260 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.641299 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.641308 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.641322 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.641337 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:20Z","lastTransitionTime":"2025-10-11T04:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.743729 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.743768 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.743778 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.743792 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.743803 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:20Z","lastTransitionTime":"2025-10-11T04:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.847144 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.847403 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.847429 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.847462 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.847490 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:20Z","lastTransitionTime":"2025-10-11T04:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.868918 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:52:20 crc kubenswrapper[4651]: E1011 04:52:20.869065 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.949870 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.949947 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.949970 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.949999 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:20 crc kubenswrapper[4651]: I1011 04:52:20.950024 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:20Z","lastTransitionTime":"2025-10-11T04:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.052969 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.053015 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.053028 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.053047 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.053059 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:21Z","lastTransitionTime":"2025-10-11T04:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.156163 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.156200 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.156210 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.156224 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.156233 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:21Z","lastTransitionTime":"2025-10-11T04:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.258694 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.258861 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.258889 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.258912 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.258928 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:21Z","lastTransitionTime":"2025-10-11T04:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.362349 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.362411 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.362427 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.362453 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.362469 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:21Z","lastTransitionTime":"2025-10-11T04:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.465141 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.465180 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.465189 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.465207 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.465217 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:21Z","lastTransitionTime":"2025-10-11T04:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.568062 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.568113 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.568132 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.568155 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.568312 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:21Z","lastTransitionTime":"2025-10-11T04:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.670838 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.670878 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.670887 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.670902 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.670910 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:21Z","lastTransitionTime":"2025-10-11T04:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.773611 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.773658 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.773670 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.773688 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.773700 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:21Z","lastTransitionTime":"2025-10-11T04:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.868455 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:52:21 crc kubenswrapper[4651]: E1011 04:52:21.868648 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.868468 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.868770 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:52:21 crc kubenswrapper[4651]: E1011 04:52:21.868896 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:52:21 crc kubenswrapper[4651]: E1011 04:52:21.869002 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.876026 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.876065 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.876076 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.876093 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.876104 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:21Z","lastTransitionTime":"2025-10-11T04:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.978944 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.978980 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.978989 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.979003 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:21 crc kubenswrapper[4651]: I1011 04:52:21.979012 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:21Z","lastTransitionTime":"2025-10-11T04:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:22 crc kubenswrapper[4651]: I1011 04:52:22.081501 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:22 crc kubenswrapper[4651]: I1011 04:52:22.081759 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:22 crc kubenswrapper[4651]: I1011 04:52:22.081935 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:22 crc kubenswrapper[4651]: I1011 04:52:22.082157 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:22 crc kubenswrapper[4651]: I1011 04:52:22.082366 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:22Z","lastTransitionTime":"2025-10-11T04:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:22 crc kubenswrapper[4651]: I1011 04:52:22.185582 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:22 crc kubenswrapper[4651]: I1011 04:52:22.185923 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:22 crc kubenswrapper[4651]: I1011 04:52:22.185996 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:22 crc kubenswrapper[4651]: I1011 04:52:22.186072 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:22 crc kubenswrapper[4651]: I1011 04:52:22.186131 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:22Z","lastTransitionTime":"2025-10-11T04:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:22 crc kubenswrapper[4651]: I1011 04:52:22.287998 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:22 crc kubenswrapper[4651]: I1011 04:52:22.288218 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:22 crc kubenswrapper[4651]: I1011 04:52:22.288281 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:22 crc kubenswrapper[4651]: I1011 04:52:22.288347 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:22 crc kubenswrapper[4651]: I1011 04:52:22.288406 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:22Z","lastTransitionTime":"2025-10-11T04:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:22 crc kubenswrapper[4651]: I1011 04:52:22.390563 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:22 crc kubenswrapper[4651]: I1011 04:52:22.390782 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:22 crc kubenswrapper[4651]: I1011 04:52:22.390870 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:22 crc kubenswrapper[4651]: I1011 04:52:22.390943 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:22 crc kubenswrapper[4651]: I1011 04:52:22.391004 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:22Z","lastTransitionTime":"2025-10-11T04:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:22 crc kubenswrapper[4651]: I1011 04:52:22.493500 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:22 crc kubenswrapper[4651]: I1011 04:52:22.494012 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:22 crc kubenswrapper[4651]: I1011 04:52:22.494298 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:22 crc kubenswrapper[4651]: I1011 04:52:22.494623 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:22 crc kubenswrapper[4651]: I1011 04:52:22.494867 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:22Z","lastTransitionTime":"2025-10-11T04:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:22 crc kubenswrapper[4651]: I1011 04:52:22.599414 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:22 crc kubenswrapper[4651]: I1011 04:52:22.599908 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:22 crc kubenswrapper[4651]: I1011 04:52:22.600230 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:22 crc kubenswrapper[4651]: I1011 04:52:22.600555 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:22 crc kubenswrapper[4651]: I1011 04:52:22.600725 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:22Z","lastTransitionTime":"2025-10-11T04:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 11 04:52:22 crc kubenswrapper[4651]: I1011 04:52:22.869372 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 11 04:52:22 crc kubenswrapper[4651]: E1011 04:52:22.869489 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
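Every entry so far traces back to the same root cause: nothing has yet written a CNI configuration into /etc/kubernetes/cni/net.d/. As a rough illustration of the kind of check behind the "no CNI configuration file" message (not the kubelet's actual implementation, which goes through the CNI plugin library), a readiness probe of that directory amounts to:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// hasCNIConfig reports whether dir contains at least one file with a
// CNI configuration extension. A minimal sketch of the check behind the
// "no CNI configuration file" message above; not kubelet source.
func hasCNIConfig(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
	if err != nil {
		fmt.Println("cannot read CNI conf dir:", err)
		return
	}
	// Stays false until the network provider writes its config.
	fmt.Println("NetworkReady:", ok)
}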
Oct 11 04:52:23 crc kubenswrapper[4651]: I1011 04:52:23.868833 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 11 04:52:23 crc kubenswrapper[4651]: I1011 04:52:23.868940 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8"
Oct 11 04:52:23 crc kubenswrapper[4651]: E1011 04:52:23.869092 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 11 04:52:23 crc kubenswrapper[4651]: E1011 04:52:23.869491 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30"
Oct 11 04:52:23 crc kubenswrapper[4651]: I1011 04:52:23.869622 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 11 04:52:23 crc kubenswrapper[4651]: E1011 04:52:23.869744 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
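All three skipped pods need a network sandbox, so their sync attempts fail until the CNI config appears; the kubelet simply retries on later sync passes, which is why the same trio reappears at 04:52:24-25 below. A hedged sketch of that gate, with invented names (the real check lives in the kubelet's pod sync path, where host-network pods are typically exempt):

package main

import (
	"errors"
	"fmt"
)

// Pod is a stand-in for the kubelet's pod object; only the fields the
// gate needs are modeled here.
type Pod struct {
	Name        string
	HostNetwork bool
}

var errNetworkNotReady = errors.New(
	"network is not ready: container runtime network not ready: NetworkReady=false")

// canStartSandbox is a hypothetical helper mirroring the behavior in
// the log: pods that need a network sandbox are skipped until the CNI
// config appears, while host-network pods may proceed.
func canStartSandbox(p Pod, networkReady bool) error {
	if networkReady || p.HostNetwork {
		return nil
	}
	return fmt.Errorf("error syncing pod %q, skipping: %w", p.Name, errNetworkNotReady)
}

func main() {
	pods := []Pod{
		{Name: "network-check-target-xd92c"},
		{Name: "network-metrics-daemon-tgvv8"},
		{Name: "etcd-crc", HostNetwork: true}, // hypothetical host-network pod
	}
	for _, p := range pods {
		if err := canStartSandbox(p, false); err != nil {
			fmt.Println(err)
			continue
		}
		fmt.Println("would start sandbox for", p.Name)
	}
}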
Oct 11 04:52:24 crc kubenswrapper[4651]: I1011 04:52:24.868802 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 11 04:52:24 crc kubenswrapper[4651]: E1011 04:52:24.868914 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 11 04:52:25 crc kubenswrapper[4651]: I1011 04:52:25.461846 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a551fed8-58fb-48ae-88af-8dc0cb48fc30-metrics-certs\") pod \"network-metrics-daemon-tgvv8\" (UID: \"a551fed8-58fb-48ae-88af-8dc0cb48fc30\") " pod="openshift-multus/network-metrics-daemon-tgvv8"
Oct 11 04:52:25 crc kubenswrapper[4651]: E1011 04:52:25.462057 4651 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Oct 11 04:52:25 crc kubenswrapper[4651]: E1011 04:52:25.462191 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a551fed8-58fb-48ae-88af-8dc0cb48fc30-metrics-certs podName:a551fed8-58fb-48ae-88af-8dc0cb48fc30 nodeName:}" failed. No retries permitted until 2025-10-11 04:52:57.462149095 +0000 UTC m=+98.358381901 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a551fed8-58fb-48ae-88af-8dc0cb48fc30-metrics-certs") pod "network-metrics-daemon-tgvv8" (UID: "a551fed8-58fb-48ae-88af-8dc0cb48fc30") : object "openshift-multus"/"metrics-daemon-secret" not registered
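The 32 s durationBeforeRetry marks the tail of an exponential backoff: each failed MountVolume.SetUp roughly doubles the wait before the next attempt, so by this entry the secret mount has already failed several times. A self-contained sketch of that retry shape; the 0.5 s start, factor of 2, and cap are assumptions chosen to reproduce the logged 32 s, not kubelet's exact constants:

package main

import (
	"errors"
	"fmt"
	"time"
)

// retryWithBackoff retries op, doubling the delay after each failure up
// to maxDelay. Note: this really sleeps, so the demo below takes about
// a minute to run to completion.
func retryWithBackoff(op func() error, initial, maxDelay time.Duration, attempts int) error {
	delay := initial
	var err error
	for i := 0; i < attempts; i++ {
		if err = op(); err == nil {
			return nil
		}
		fmt.Printf("attempt %d failed (%v); durationBeforeRetry %s\n", i+1, err, delay)
		time.Sleep(delay)
		if delay *= 2; delay > maxDelay {
			delay = maxDelay
		}
	}
	return err
}

func main() {
	errNotRegistered := errors.New(`object "openshift-multus"/"metrics-daemon-secret" not registered`)
	mount := func() error { return errNotRegistered } // always fails, like the log
	// 0.5s, 1s, 2s, 4s, 8s, 16s, 32s: the seventh retry waits 32 s.
	_ = retryWithBackoff(mount, 500*time.Millisecond, 2*time.Minute, 7)
}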
Oct 11 04:52:25 crc kubenswrapper[4651]: I1011 04:52:25.869038 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 11 04:52:25 crc kubenswrapper[4651]: I1011 04:52:25.869045 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8"
Oct 11 04:52:25 crc kubenswrapper[4651]: E1011 04:52:25.869357 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 11 04:52:25 crc kubenswrapper[4651]: E1011 04:52:25.869518 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30"
Oct 11 04:52:25 crc kubenswrapper[4651]: I1011 04:52:25.869060 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 11 04:52:25 crc kubenswrapper[4651]: E1011 04:52:25.869749 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 11 04:52:26 crc kubenswrapper[4651]: I1011 04:52:26.619131 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:52:26 crc kubenswrapper[4651]: I1011 04:52:26.619172 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:52:26 crc kubenswrapper[4651]: I1011 04:52:26.619185 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:52:26 crc kubenswrapper[4651]: I1011 04:52:26.619203 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:52:26 crc kubenswrapper[4651]: I1011 04:52:26.619215 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:26Z","lastTransitionTime":"2025-10-11T04:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
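The node-status patch attempts that follow fail because the node-identity webhook at 127.0.0.1:9743 presents a serving certificate that expired on 2025-08-24, well before the node's current clock. A hypothetical diagnostic (not part of kubelet; the endpoint is taken from the log) that dials the webhook and reports the certificate's validity window:

package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Skip chain verification so the handshake succeeds even with an
	// expired certificate; we only want to inspect what is presented.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()
	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Printf("cert valid %s - %s\n",
		cert.NotBefore.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
	if time.Now().After(cert.NotAfter) {
		// Matches the x509 "certificate has expired" error in the patch failure below.
		fmt.Println("certificate has expired")
	}
}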
Oct 11 04:52:26 crc kubenswrapper[4651]: E1011 04:52:26.632641 4651 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a821a5c3-63e0-43db-82e0-e9c6e98ead52\\\",\\\"systemUUID\\\":\\\"f1c4ea71-0c28-43a7-99a4-e27ff72e186a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:26Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:26 crc kubenswrapper[4651]: I1011 04:52:26.637446 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:26 crc kubenswrapper[4651]: I1011 04:52:26.637568 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure"
Oct 11 04:52:26 crc kubenswrapper[4651]: I1011 04:52:26.637654 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:52:26 crc kubenswrapper[4651]: I1011 04:52:26.637721 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:52:26 crc kubenswrapper[4651]: I1011 04:52:26.637789 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:26Z","lastTransitionTime":"2025-10-11T04:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:52:26 crc kubenswrapper[4651]: I1011 04:52:26.652850 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:52:26 crc kubenswrapper[4651]: I1011 04:52:26.652892 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
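The x509 failure above is straightforward to confirm from the node itself: the webhook at 127.0.0.1:9743 is serving a certificate whose notAfter (2025-08-24T17:21:41Z) is earlier than the current time the kubelet reports. A minimal Go sketch along these lines, assuming only that the endpoint is reachable from the node (the address and expiry come from the log; nothing here is taken from the kubelet or webhook source), prints the served certificate's validity window:

package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	// Address taken from the kubelet error above. This is a diagnostic
	// sketch, not part of any OpenShift component.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		InsecureSkipVerify: true, // inspect the certificate without trusting it
	})
	if err != nil {
		log.Fatalf("dial webhook: %v", err)
	}
	defer conn.Close()

	now := time.Now()
	for _, cert := range conn.ConnectionState().PeerCertificates {
		fmt.Printf("subject=%q notBefore=%s notAfter=%s expired=%t\n",
			cert.Subject.String(),
			cert.NotBefore.Format(time.RFC3339),
			cert.NotAfter.Format(time.RFC3339),
			now.After(cert.NotAfter))
	}
}

For the state logged above, the last field would print expired=true for the leaf certificate, matching the "certificate has expired or is not yet valid" message.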
Oct 11 04:52:26 crc kubenswrapper[4651]: I1011 04:52:26.652905 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:52:26 crc kubenswrapper[4651]: I1011 04:52:26.652924 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:52:26 crc kubenswrapper[4651]: I1011 04:52:26.652937 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:26Z","lastTransitionTime":"2025-10-11T04:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:52:26 crc kubenswrapper[4651]: E1011 04:52:26.700867 4651 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
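The sequence above is the kubelet's bounded retry around node-status patches: each PATCH attempt is rejected by the webhook, logged as "will retry", and once the retry budget is spent the kubelet gives up with "update node status exceeds retry count". A rough sketch of that control flow, assuming a budget of 5 attempts (the kubelet source defines a nodeStatusUpdateRetry constant with that value; everything else here is illustrative, not the actual implementation):

package main

import (
	"errors"
	"fmt"
)

// nodeStatusUpdateRetry mirrors the kubelet's retry budget for node status
// updates; the surrounding loop is a simplified illustration.
const nodeStatusUpdateRetry = 5

func updateNodeStatus(try func() error) error {
	for i := 0; i < nodeStatusUpdateRetry; i++ {
		if err := try(); err != nil {
			fmt.Printf("Error updating node status, will retry: %v\n", err)
			continue
		}
		return nil
	}
	return errors.New("update node status exceeds retry count")
}

func main() {
	err := updateNodeStatus(func() error {
		// Stand-in for the PATCH that the expired-certificate webhook rejects above.
		return errors.New("x509: certificate has expired or is not yet valid")
	})
	fmt.Println(err)
}

Run against a permanently failing attempt, this prints five "will retry" lines followed by the same "exceeds retry count" error the log shows, which is why the error repeats at sub-second intervals before the final give-up message.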
Oct 11 04:52:26 crc kubenswrapper[4651]: I1011 04:52:26.716232 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:52:26 crc kubenswrapper[4651]: I1011 04:52:26.716278 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:52:26 crc kubenswrapper[4651]: I1011 04:52:26.716292 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:52:26 crc kubenswrapper[4651]: I1011 04:52:26.716313 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:52:26 crc kubenswrapper[4651]: I1011 04:52:26.716328 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:26Z","lastTransitionTime":"2025-10-11T04:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:52:26 crc kubenswrapper[4651]: I1011 04:52:26.819129 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:52:26 crc kubenswrapper[4651]: I1011 04:52:26.819163 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:52:26 crc kubenswrapper[4651]: I1011 04:52:26.819172 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:52:26 crc kubenswrapper[4651]: I1011 04:52:26.819185 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:52:26 crc kubenswrapper[4651]: I1011 04:52:26.819196 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:26Z","lastTransitionTime":"2025-10-11T04:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:52:26 crc kubenswrapper[4651]: I1011 04:52:26.869451 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 11 04:52:26 crc kubenswrapper[4651]: E1011 04:52:26.869655 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
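Independently of the certificate problem, pod sync is blocked because the runtime reports no CNI configuration under /etc/kubernetes/cni/net.d/. Whether that directory is actually empty is easy to verify on the node; the sketch below assumes the conventional CNI loader behaviour of accepting .conf, .conflist, and .json files (the path itself comes from the log message):

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	// Directory named in the NetworkPluginNotReady message above.
	dir := "/etc/kubernetes/cni/net.d"
	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Printf("cannot read %s: %v\n", dir, err)
		return
	}
	found := 0
	for _, e := range entries {
		// Extensions commonly accepted by CNI config loaders.
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			fmt.Println("CNI config:", filepath.Join(dir, e.Name()))
			found++
		}
	}
	if found == 0 {
		fmt.Println("no CNI configuration file found; network plugin not ready")
	}
}

On this node the directory stays empty until the OVN-Kubernetes pod below writes its configuration, which is why the NotReady condition persists while ovnkube-controller is crash-looping.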
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:52:26 crc kubenswrapper[4651]: I1011 04:52:26.870733 4651 scope.go:117] "RemoveContainer" containerID="e0575c5b690a6b170f1e7dbb71dc3d09d4dbb86071a2363d6d7c4c85e2c78320" Oct 11 04:52:26 crc kubenswrapper[4651]: E1011 04:52:26.871035 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-6zt9s_openshift-ovn-kubernetes(28e01c08-a461-4f44-a49c-4bf92fd3a2ce)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" Oct 11 04:52:26 crc kubenswrapper[4651]: I1011 04:52:26.922049 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:26 crc kubenswrapper[4651]: I1011 04:52:26.922449 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:26 crc kubenswrapper[4651]: I1011 04:52:26.922467 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:26 crc kubenswrapper[4651]: I1011 04:52:26.922493 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:26 crc kubenswrapper[4651]: I1011 04:52:26.922519 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:26Z","lastTransitionTime":"2025-10-11T04:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.024570 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.024601 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.024611 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.024624 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.024634 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:27Z","lastTransitionTime":"2025-10-11T04:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.127585 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.127626 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.127635 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.127650 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.127661 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:27Z","lastTransitionTime":"2025-10-11T04:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.230306 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.230348 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.230359 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.230374 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.230385 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:27Z","lastTransitionTime":"2025-10-11T04:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.333515 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.333551 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.333560 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.333575 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.333585 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:27Z","lastTransitionTime":"2025-10-11T04:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.436367 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.436405 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.436416 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.436433 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.436447 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:27Z","lastTransitionTime":"2025-10-11T04:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.539084 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.539120 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.539129 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.539144 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.539156 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:27Z","lastTransitionTime":"2025-10-11T04:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.641563 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.641650 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.641665 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.641684 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.641695 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:27Z","lastTransitionTime":"2025-10-11T04:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.744134 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.744215 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.744241 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.744269 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.744286 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:27Z","lastTransitionTime":"2025-10-11T04:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.847587 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.847634 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.847645 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.847664 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.847677 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:27Z","lastTransitionTime":"2025-10-11T04:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.868808 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.868871 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.868854 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:52:27 crc kubenswrapper[4651]: E1011 04:52:27.869039 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:52:27 crc kubenswrapper[4651]: E1011 04:52:27.869162 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:52:27 crc kubenswrapper[4651]: E1011 04:52:27.869249 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.949740 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.949853 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.949866 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.949882 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:27 crc kubenswrapper[4651]: I1011 04:52:27.949894 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:27Z","lastTransitionTime":"2025-10-11T04:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.052566 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.052629 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.052646 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.052670 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.052687 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:28Z","lastTransitionTime":"2025-10-11T04:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.155242 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.155285 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.155296 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.155322 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.155332 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:28Z","lastTransitionTime":"2025-10-11T04:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.258125 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.258172 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.258188 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.258210 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.258226 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:28Z","lastTransitionTime":"2025-10-11T04:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.294897 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wz4hw_fbfdd781-994b-49b4-9c8e-edc0ea4145d1/kube-multus/0.log" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.294938 4651 generic.go:334] "Generic (PLEG): container finished" podID="fbfdd781-994b-49b4-9c8e-edc0ea4145d1" containerID="2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2" exitCode=1 Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.294962 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wz4hw" event={"ID":"fbfdd781-994b-49b4-9c8e-edc0ea4145d1","Type":"ContainerDied","Data":"2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2"} Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.295247 4651 scope.go:117] "RemoveContainer" containerID="2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.305680 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7407b7da9820bbcdac4ce9c49c4520834a466b0db187c5f861972713ba1583f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:28Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.322780 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:28Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.336162 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-tgvv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a551fed8-58fb-48ae-88af-8dc0cb48fc30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nphhq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nphhq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:53Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-tgvv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:28Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.349359 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f5830d7-3c6f-48f0-b103-a228e7f8e448\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c307ee086caacd0a5299325126ff0b4b140aa1e18153b8f66696534543bb069\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e30f3d9db8e89696a8a98ab24ca995088afa72c957e19ca59ace228a23268d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qsgwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:28Z is after 2025-08-24T17:21:41Z" Oct 11 
04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.360812 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.361798 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.361807 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.361834 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.361845 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:28Z","lastTransitionTime":"2025-10-11T04:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.363765 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:28Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.377634 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52a0f9e11cf4e8c69d5b356b702ce9df8cbc935daadadf531004a23e5f6dad65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-11T04:52:28Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.405290 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fd0aaae2760a1c934aed54b8e07528f0f78db1149c9e1f8674bec90b61a7078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://788d6a46d740de869fe7cd1d28479f8115a3515c7bd2482e06cbc4dba7b27fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\
"containerID\\\":\\\"cri-o://68954082d4369b9b4d95ba1f15e19593dc330b698562be413055b2ba012b5f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eed8335754437c935f07223bf4c4f40f9ab0bbdc9a86b7610f7f6fd55140e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23b3fde15e637081cbf5fad2e55ccea1f0fe63d39ff5f82a60ce05ffeef9a837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f927067aabe4690e99fe50a069afdf68b22d950a8ae141235571ced468c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0575c5b690a6b170f1e7dbb71dc3d09d4dbb86071a2363d6d7c4c85e2c78320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0575c5b690a6b170f1e7dbb71dc3d09d4dbb86071a2363d6d7c4c85e2c78320\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T04:52:10Z\\\",\\\"message\\\":\\\"andler 8\\\\nI1011 04:52:10.796228 6293 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1011 04:52:10.796255 6293 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1011 04:52:10.796280 6293 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1011 04:52:10.796326 6293 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1011 04:52:10.796340 6293 handler.go:208] Removed *v1.Node event handler 2\\\\nI1011 04:52:10.796359 6293 handler.go:208] Removed *v1.Node event handler 7\\\\nI1011 04:52:10.796378 6293 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1011 04:52:10.796447 6293 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1011 04:52:10.796476 6293 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1011 04:52:10.796536 6293 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1011 04:52:10.796553 6293 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1011 04:52:10.796592 6293 factory.go:656] Stopping watch factory\\\\nI1011 04:52:10.796634 6293 ovnkube.go:599] Stopped ovnkube\\\\nI1011 04:52:10.796638 6293 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1011 04:52:10.796674 6293 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1011 04:52:10.796792 6293 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:52:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6zt9s_openshift-ovn-kubernetes(28e01c08-a461-4f44-a49c-4bf92fd3a2ce)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://add3ed68e0bf59bc569248a3660cfc9a20995641ec65b0bf7133e47c366aad73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:28Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.425901 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a036609457cdb5a74a4ccf65ffa2e502f32994e6e3f1ef8a4fd4b89c5454773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2025-10-11T04:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:28Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.445278 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d425a775a5e278ee77231764e58cac3441e04c383eee54f69ba84488ca640eae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is 
not yet valid: current time 2025-10-11T04:52:28Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.457470 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55e4c9c2d46f67bb0f8ccdb2de27dc82f0280476613b3b30d587ff6f5784e787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:28Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.464420 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.464453 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.464463 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.464480 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.464489 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:28Z","lastTransitionTime":"2025-10-11T04:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.473930 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e5b8526fed77f2e541d6793dc36a902daebce60d367f28f9efb45f7fabb44fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e278c9a5d14f026733737226a6ffc91202b760e48622d5d0031678981fa5f857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:28Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.490255 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:28Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.504911 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f12e36a570de649e1df8107ea828cd8e65c18e111de191d3796fc1f3134e43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:28Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.522171 4651 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T04:52:27Z\\\",\\\"message\\\":\\\"2025-10-11T04:51:42+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_6af57944-ba42-4ed4-a4aa-c5a5d8283647\\\\n2025-10-11T04:51:42+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_6af57944-ba42-4ed4-a4aa-c5a5d8283647 to /host/opt/cni/bin/\\\\n2025-10-11T04:51:42Z [verbose] multus-daemon started\\\\n2025-10-11T04:51:42Z [verbose] Readiness Indicator file check\\\\n2025-10-11T04:52:27Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:28Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.541524 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:28Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.559173 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"744aa461-2868-440d-a1b9-6d30c0c50b56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aabba06dd47ad528f830a47cc79ddabb3789d24ebed62a6e8f976df1b156ef1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb996f17546fc77ac1b8ed27428aaa6060822daa0156cf016dbb24ed278403a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ee2612930be6596c0186cc000d80039b52e225fd64102002ed9316a0605521\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6263dc47a8284e9849da1fb15c4788174252f3637665ac6e4b19298ca90c587\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:28Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.566444 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.566504 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.566516 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.566535 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.566547 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:28Z","lastTransitionTime":"2025-10-11T04:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.572841 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dacbe38-be95-4b56-a204-f87d2e8d6496\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918c77155c7ad4e14a9706e6e36a26cf2c774133b3435468d326b1b8c1f29f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf6183b5381ec04f62d32175471097bd2d2088003063202375b88ccfb9080fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2aaeb6d47f2435f2865c7516d976fecaf6de20b458b5cdcea1cdf59449cdef9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"
resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96c869e21e725921f47799368a3327f628bbdd7d7db8b4d0f29bf27b4d04551b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96c869e21e725921f47799368a3327f628bbdd7d7db8b4d0f29bf27b4d04551b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:28Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.669140 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.669191 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.669210 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.669233 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.669249 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:28Z","lastTransitionTime":"2025-10-11T04:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.771720 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.771790 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.771801 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.771848 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.771861 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:28Z","lastTransitionTime":"2025-10-11T04:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.868579 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:52:28 crc kubenswrapper[4651]: E1011 04:52:28.868702 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.874256 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.874300 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.874309 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.874323 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.874332 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:28Z","lastTransitionTime":"2025-10-11T04:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.976545 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.976598 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.976610 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.976628 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:28 crc kubenswrapper[4651]: I1011 04:52:28.976641 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:28Z","lastTransitionTime":"2025-10-11T04:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.079522 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.079579 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.079589 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.079606 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.079616 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:29Z","lastTransitionTime":"2025-10-11T04:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.182251 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.182287 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.182298 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.182312 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.182321 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:29Z","lastTransitionTime":"2025-10-11T04:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.285087 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.285140 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.285157 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.285182 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.285200 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:29Z","lastTransitionTime":"2025-10-11T04:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.299141 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wz4hw_fbfdd781-994b-49b4-9c8e-edc0ea4145d1/kube-multus/0.log" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.299197 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wz4hw" event={"ID":"fbfdd781-994b-49b4-9c8e-edc0ea4145d1","Type":"ContainerStarted","Data":"453b8d5da6858078639895d4d19bb1783eefeeb7c558eec6984f6ebdadd5d8fc"} Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.314980 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7407b7da9820bbcdac4ce9c49c4520834a466b0db187c5f861972713ba1583f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:29Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.333946 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:29Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.346918 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-tgvv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a551fed8-58fb-48ae-88af-8dc0cb48fc30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nphhq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nphhq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:53Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-tgvv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:29Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.365639 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:29Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.380614 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52a0f9e11cf4e8c69d5b356b702ce9df8cbc935daadadf531004a23e5f6dad65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-11T04:52:29Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.388167 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.388206 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.388217 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.388235 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.388248 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:29Z","lastTransitionTime":"2025-10-11T04:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.409603 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fd0aaae2760a1c934aed54b8e07528f0f78db1149c9e1f8674bec90b61a7078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://788d6a46d740de869fe7cd1d28479f8115a3515c7bd2482e06cbc4dba7b27fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68954082d4369b9b4d95ba1f15e19593dc330b698562be413055b2ba012b5f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eed8335754437c935f07223bf4c4f40f9ab0bbdc9a86b7610f7f6fd55140e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23b3fde15e637081cbf5fad2e55ccea1f0fe63d39ff5f82a60ce05ffeef9a837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f927067aabe4690e99fe50a069afdf68b22d950a8ae141235571ced468c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0575c5b690a6b170f1e7dbb71dc3d09d4dbb860
71a2363d6d7c4c85e2c78320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0575c5b690a6b170f1e7dbb71dc3d09d4dbb86071a2363d6d7c4c85e2c78320\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T04:52:10Z\\\",\\\"message\\\":\\\"andler 8\\\\nI1011 04:52:10.796228 6293 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1011 04:52:10.796255 6293 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1011 04:52:10.796280 6293 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1011 04:52:10.796326 6293 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1011 04:52:10.796340 6293 handler.go:208] Removed *v1.Node event handler 2\\\\nI1011 04:52:10.796359 6293 handler.go:208] Removed *v1.Node event handler 7\\\\nI1011 04:52:10.796378 6293 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1011 04:52:10.796447 6293 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1011 04:52:10.796476 6293 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1011 04:52:10.796536 6293 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1011 04:52:10.796553 6293 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1011 04:52:10.796592 6293 factory.go:656] Stopping watch factory\\\\nI1011 04:52:10.796634 6293 ovnkube.go:599] Stopped ovnkube\\\\nI1011 04:52:10.796638 6293 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1011 04:52:10.796674 6293 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1011 04:52:10.796792 6293 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:52:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6zt9s_openshift-ovn-kubernetes(28e01c08-a461-4f44-a49c-4bf92fd3a2ce)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://add3ed68e0bf59bc569248a3660cfc9a20995641ec65b0bf7133e47c366aad73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:29Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.424457 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a036609457cdb5a74a4ccf65ffa2e502f32994e6e3f1ef8a4fd4b89c5454773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2025-10-11T04:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:29Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.436434 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f5830d7-3c6f-48f0-b103-a228e7f8e448\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c307ee086caacd0a5299325126ff0b4b140aa1e18153b8f66696534543bb069\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e30f3d9db8e89696a8a98ab24ca995088afa72c957e19ca59ace228a23268d9c\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qsgwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:29Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.451594 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d425a775a5e278ee77231764e58cac3441e04c383eee54f69ba84488ca640eae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:29Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.466124 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55e4c9c2d46f67bb0f8ccdb2de27dc82f0280476613b3b30d587ff6f5784e787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:29Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.478031 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e5b8526fed77f2e541d6793dc36a902daebce60d367f28f9efb45f7fabb44fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e278c9a5d14f026733737226a6ffc91202b760e48622d5d0031678981fa5f857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:29Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.490604 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.490698 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.490757 4651 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.490842 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.490949 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:29Z","lastTransitionTime":"2025-10-11T04:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.496948 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"im
age\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:29Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.510163 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"744aa461-2868-440d-a1b9-6d30c0c50b56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aabba06dd47ad528f830a47cc79ddabb3789d24ebed62a6e8f976df1b156ef1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb996f17546fc77ac1b8ed27428aaa6060822daa0156cf016dbb24ed278403a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ee2612930be6596c0186cc000d80039b52e225fd64102002ed9316a0605521\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6263dc47a8284e9849da1fb15c4788174252f3637665ac6e4b19298ca90c587\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:29Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.524613 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dacbe38-be95-4b56-a204-f87d2e8d6496\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918c77155c7ad4e14a9706e6e36a26cf2c774133b3435468d326b1b8c1f29f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf6183b5381ec04f62d32175471097bd2d2088003063202375b88ccfb9080fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2aaeb6d47f2435f2865c7516d976fecaf6de20b458b5cdcea1cdf59449cdef9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96c869e21e725921f47799368a3327f628bbdd7d7db8b4d0f29bf27b4d04551b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96c869e21e725921f47799368a3327f628bbdd7d7db8b4d0f29bf27b4d04551b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:29Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.539194 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:29Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.551983 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f12e36a570de649e1df8107ea828cd8e65c18e111de191d3796fc1f3134e43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:29Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.565398 4651 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453b8d5da6858078639895d4d19bb1783eefeeb7c558eec6984f6ebdadd5d8fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T04:52:27Z\\\",\\\"message\\\":\\\"2025-10-11T04:51:42+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_6af57944-ba42-4ed4-a4aa-c5a5d8283647\\\\n2025-10-11T04:51:42+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_6af57944-ba42-4ed4-a4aa-c5a5d8283647 to /host/opt/cni/bin/\\\\n2025-10-11T04:51:42Z [verbose] multus-daemon started\\\\n2025-10-11T04:51:42Z [verbose] Readiness Indicator file check\\\\n2025-10-11T04:52:27Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:29Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.593414 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.593448 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.593457 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.593471 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.593480 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:29Z","lastTransitionTime":"2025-10-11T04:52:29Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.695433 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.695476 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.695488 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.695504 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.695515 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:29Z","lastTransitionTime":"2025-10-11T04:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.797712 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.797753 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.797761 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.797775 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.797786 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:29Z","lastTransitionTime":"2025-10-11T04:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.868433 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.868505 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:52:29 crc kubenswrapper[4651]: E1011 04:52:29.868565 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:52:29 crc kubenswrapper[4651]: E1011 04:52:29.868611 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.868508 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:52:29 crc kubenswrapper[4651]: E1011 04:52:29.869017 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.884365 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a036609457cdb5a74a4ccf65ffa2e502f32994e6e3f1ef8a4fd4b89c5454773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e
40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},
{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125
a8a6259b2eb3433a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:29Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.896240 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f5830d7-3c6f-48f0-b103-a228e7f8e448\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c307ee086caacd0a5299325126ff0b4b140aa1e18153b8f66696534543bb069\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e30f3d9db8e89696a8a98ab24ca995088afa72c957e19ca59ace228a23268d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qsgwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:29Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.899416 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.899445 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.899457 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.899472 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.899483 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:29Z","lastTransitionTime":"2025-10-11T04:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.908261 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:29Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.918486 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52a0f9e11cf4e8c69d5b356b702ce9df8cbc935daadadf531004a23e5f6dad65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:29Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.936498 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fd0aaae2760a1c934aed54b8e07528f0f78db1149c9e1f8674bec90b61a7078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://788d6a46d740de869fe7cd1d28479f8115a3515c7bd2482e06cbc4dba7b27fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68954082d4369b9b4d95ba1f15e19593dc330b698562be413055b2ba012b5f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eed8335754437c935f07223bf4c4f40f9ab0bbdc9a86b7610f7f6fd55140e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23b3fde15e637081cbf5fad2e55ccea1f0fe63d39ff5f82a60ce05ffeef9a837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f927067aabe4690e99fe50a069afdf68b22d950a8ae141235571ced468c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0575c5b690a6b170f1e7dbb71dc3d09d4dbb86071a2363d6d7c4c85e2c78320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0575c5b690a6b170f1e7dbb71dc3d09d4dbb86071a2363d6d7c4c85e2c78320\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T04:52:10Z\\\",\\\"message\\\":\\\"andler 8\\\\nI1011 04:52:10.796228 6293 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1011 04:52:10.796255 6293 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1011 04:52:10.796280 6293 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1011 04:52:10.796326 6293 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1011 04:52:10.796340 6293 handler.go:208] Removed *v1.Node event handler 2\\\\nI1011 04:52:10.796359 6293 handler.go:208] Removed *v1.Node event handler 7\\\\nI1011 04:52:10.796378 6293 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1011 04:52:10.796447 6293 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1011 04:52:10.796476 6293 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1011 04:52:10.796536 6293 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1011 04:52:10.796553 6293 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1011 04:52:10.796592 6293 factory.go:656] Stopping watch factory\\\\nI1011 04:52:10.796634 6293 ovnkube.go:599] Stopped ovnkube\\\\nI1011 04:52:10.796638 6293 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1011 04:52:10.796674 6293 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1011 04:52:10.796792 6293 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:52:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6zt9s_openshift-ovn-kubernetes(28e01c08-a461-4f44-a49c-4bf92fd3a2ce)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://add3ed68e0bf59bc569248a3660cfc9a20995641ec65b0bf7133e47c366aad73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:29Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.951079 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d425a775a5e278ee77231764e58cac3441e04c383eee54f69ba84488ca640eae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:29Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.963183 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55e4c9c2d46f67bb0f8ccdb2de27dc82f0280476613b3b30d587ff6f5784e787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:29Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.975462 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e5b8526fed77f2e541d6793dc36a902daebce60d367f28f9efb45f7fabb44fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e278c9a5d14f026733737226a6ffc91202b760e48622d5d0031678981fa5f857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:29Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.987654 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:29Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:29 crc kubenswrapper[4651]: I1011 04:52:29.996685 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f12e36a570de649e1df8107ea828cd8e65c18e111de191d3796fc1f3134e43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:29Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.001683 4651 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.001712 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.001722 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.001736 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.001747 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:30Z","lastTransitionTime":"2025-10-11T04:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.007938 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453b8d5da6858078639895d4d19bb1783eefeeb7c558eec6984f6ebdadd5d8fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T04:52:27Z\\\",\\\"message\\\":\\\"2025-10-11T04:51:42+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_6af57944-ba42-4ed4-a4aa-c5a5d8283647\\\\n2025-10-11T04:51:42+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_6af57944-ba42-4ed4-a4aa-c5a5d8283647 to /host/opt/cni/bin/\\\\n2025-10-11T04:51:42Z [verbose] multus-daemon started\\\\n2025-10-11T04:51:42Z [verbose] Readiness Indicator file check\\\\n2025-10-11T04:52:27Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:30Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.020417 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:30Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.031561 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"744aa461-2868-440d-a1b9-6d30c0c50b56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aabba06dd47ad528f830a47cc79ddabb3789d24ebed62a6e8f976df1b156ef1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb996f17546fc77ac1b8ed27428aaa6060822daa0156cf016dbb24ed278403a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ee2612930be6596c0186cc000d80039b52e225fd64102002ed9316a0605521\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6263dc47a8284e9849da1fb15c4788174252f3637665ac6e4b19298ca90c587\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:30Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.040304 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dacbe38-be95-4b56-a204-f87d2e8d6496\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918c77155c7ad4e14a9706e6e36a26cf2c774133b3435468d326b1b8c1f29f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf6183b5381ec04f62d32175471097bd2d2088003063202375b88ccfb9080fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2aaeb6d47f2435f2865c7516d976fecaf6de20b458b5cdcea1cdf59449cdef9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96c869e21e725921f47799368a3327f628bbdd7d7db8b4d0f29bf27b4d04551b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96c869e21e725921f47799368a3327f628bbdd7d7db8b4d0f29bf27b4d04551b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:30Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.049270 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-tgvv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a551fed8-58fb-48ae-88af-8dc0cb48fc30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nphhq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nphhq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:53Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-tgvv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:30Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.056915 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7407b7da9820bbcdac4ce9c49c4520834a466b0db187c5f861972713ba1583f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:30Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.065956 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:30Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.103700 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.103731 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.103766 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.103785 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.103795 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:30Z","lastTransitionTime":"2025-10-11T04:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.206256 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.206289 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.206299 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.206312 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.206321 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:30Z","lastTransitionTime":"2025-10-11T04:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.308291 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.308327 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.308336 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.308349 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.308358 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:30Z","lastTransitionTime":"2025-10-11T04:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.410847 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.410874 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.410882 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.410894 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.410903 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:30Z","lastTransitionTime":"2025-10-11T04:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.513275 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.513314 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.513324 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.513343 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.513353 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:30Z","lastTransitionTime":"2025-10-11T04:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.615381 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.615416 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.615428 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.615442 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.615451 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:30Z","lastTransitionTime":"2025-10-11T04:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.717538 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.717577 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.717589 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.717606 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.717619 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:30Z","lastTransitionTime":"2025-10-11T04:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.820570 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.820644 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.820666 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.820699 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.820723 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:30Z","lastTransitionTime":"2025-10-11T04:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.869035 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:52:30 crc kubenswrapper[4651]: E1011 04:52:30.869151 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.923047 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.923085 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.923093 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.923109 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:30 crc kubenswrapper[4651]: I1011 04:52:30.923119 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:30Z","lastTransitionTime":"2025-10-11T04:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.025237 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.025274 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.025284 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.025300 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.025313 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:31Z","lastTransitionTime":"2025-10-11T04:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.127722 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.127781 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.127798 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.127852 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.127874 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:31Z","lastTransitionTime":"2025-10-11T04:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.230223 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.230260 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.230269 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.230287 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.230298 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:31Z","lastTransitionTime":"2025-10-11T04:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.332487 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.332530 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.332541 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.332561 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.332572 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:31Z","lastTransitionTime":"2025-10-11T04:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.434932 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.434995 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.435015 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.435038 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.435054 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:31Z","lastTransitionTime":"2025-10-11T04:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.537125 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.537236 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.537249 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.537265 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.537275 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:31Z","lastTransitionTime":"2025-10-11T04:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.639582 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.639606 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.639614 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.639628 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.639637 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:31Z","lastTransitionTime":"2025-10-11T04:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.742617 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.742690 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.742702 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.742720 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.742732 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:31Z","lastTransitionTime":"2025-10-11T04:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.844898 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.844943 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.844955 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.844973 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.844987 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:31Z","lastTransitionTime":"2025-10-11T04:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.869122 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.869169 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.869317 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:52:31 crc kubenswrapper[4651]: E1011 04:52:31.869310 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:52:31 crc kubenswrapper[4651]: E1011 04:52:31.869417 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:52:31 crc kubenswrapper[4651]: E1011 04:52:31.869513 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.948236 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.948266 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.948275 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.948290 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:31 crc kubenswrapper[4651]: I1011 04:52:31.948300 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:31Z","lastTransitionTime":"2025-10-11T04:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.051475 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.051550 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.051569 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.051595 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.051613 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:32Z","lastTransitionTime":"2025-10-11T04:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.154030 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.154058 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.154066 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.154098 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.154107 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:32Z","lastTransitionTime":"2025-10-11T04:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.257210 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.257246 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.257256 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.257272 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.257282 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:32Z","lastTransitionTime":"2025-10-11T04:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.360177 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.360221 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.360231 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.360249 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.360262 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:32Z","lastTransitionTime":"2025-10-11T04:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.462498 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.462539 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.462550 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.462567 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.462580 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:32Z","lastTransitionTime":"2025-10-11T04:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.566826 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.566882 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.566892 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.566908 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.566920 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:32Z","lastTransitionTime":"2025-10-11T04:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.669403 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.669450 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.669462 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.669479 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.669492 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:32Z","lastTransitionTime":"2025-10-11T04:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.771646 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.771722 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.771748 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.771779 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.771868 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:32Z","lastTransitionTime":"2025-10-11T04:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.869187 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:52:32 crc kubenswrapper[4651]: E1011 04:52:32.869362 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.874672 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.874742 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.874766 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.874798 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.874860 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:32Z","lastTransitionTime":"2025-10-11T04:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.978238 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.983636 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.984149 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.984362 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:32 crc kubenswrapper[4651]: I1011 04:52:32.984418 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:32Z","lastTransitionTime":"2025-10-11T04:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.087781 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.088195 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.088402 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.088597 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.088902 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:33Z","lastTransitionTime":"2025-10-11T04:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.191620 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.191664 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.191673 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.191688 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.191698 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:33Z","lastTransitionTime":"2025-10-11T04:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.294240 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.294898 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.295045 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.295144 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.295231 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:33Z","lastTransitionTime":"2025-10-11T04:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.398717 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.398751 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.398763 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.398780 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.398792 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:33Z","lastTransitionTime":"2025-10-11T04:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.501262 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.501635 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.501860 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.502120 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.502313 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:33Z","lastTransitionTime":"2025-10-11T04:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.605264 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.605334 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.605354 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.605379 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.605399 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:33Z","lastTransitionTime":"2025-10-11T04:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.709445 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.709859 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.710050 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.710337 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.710588 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:33Z","lastTransitionTime":"2025-10-11T04:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.813974 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.814043 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.814060 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.814089 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.814107 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:33Z","lastTransitionTime":"2025-10-11T04:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.869208 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.869274 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:52:33 crc kubenswrapper[4651]: E1011 04:52:33.869387 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.869440 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:52:33 crc kubenswrapper[4651]: E1011 04:52:33.869611 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:52:33 crc kubenswrapper[4651]: E1011 04:52:33.869675 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.916845 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.916903 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.916921 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.916951 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:33 crc kubenswrapper[4651]: I1011 04:52:33.916971 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:33Z","lastTransitionTime":"2025-10-11T04:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.020352 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.020405 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.020419 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.020441 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.020456 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:34Z","lastTransitionTime":"2025-10-11T04:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.122493 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.122593 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.122614 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.122677 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.122697 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:34Z","lastTransitionTime":"2025-10-11T04:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.225791 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.225907 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.225931 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.225959 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.225983 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:34Z","lastTransitionTime":"2025-10-11T04:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.330528 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.330581 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.330598 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.330626 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.330646 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:34Z","lastTransitionTime":"2025-10-11T04:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.433319 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.433632 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.433696 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.433777 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.433872 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:34Z","lastTransitionTime":"2025-10-11T04:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.537229 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.537278 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.537289 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.537338 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.537352 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:34Z","lastTransitionTime":"2025-10-11T04:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.639787 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.640207 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.640408 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.640562 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.640696 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:34Z","lastTransitionTime":"2025-10-11T04:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.743574 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.744044 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.744220 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.744404 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.744558 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:34Z","lastTransitionTime":"2025-10-11T04:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.848970 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.849058 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.849077 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.849107 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.849130 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:34Z","lastTransitionTime":"2025-10-11T04:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.869043 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:52:34 crc kubenswrapper[4651]: E1011 04:52:34.869227 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.952236 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.952303 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.952325 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.952356 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:34 crc kubenswrapper[4651]: I1011 04:52:34.952378 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:34Z","lastTransitionTime":"2025-10-11T04:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.056144 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.056199 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.056212 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.056232 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.056246 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:35Z","lastTransitionTime":"2025-10-11T04:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.159356 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.159441 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.159467 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.159504 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.159529 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:35Z","lastTransitionTime":"2025-10-11T04:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.263743 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.263809 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.263856 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.263885 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.263904 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:35Z","lastTransitionTime":"2025-10-11T04:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.366570 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.366640 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.366659 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.366687 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.366714 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:35Z","lastTransitionTime":"2025-10-11T04:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.469055 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.469127 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.469147 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.469173 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.469192 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:35Z","lastTransitionTime":"2025-10-11T04:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.571856 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.571921 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.571939 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.571969 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.571988 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:35Z","lastTransitionTime":"2025-10-11T04:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.677074 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.678905 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.679124 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.679302 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.679538 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:35Z","lastTransitionTime":"2025-10-11T04:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.783737 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.783786 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.783794 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.783808 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.783830 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:35Z","lastTransitionTime":"2025-10-11T04:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.869578 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.869696 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.869584 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:52:35 crc kubenswrapper[4651]: E1011 04:52:35.869943 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:52:35 crc kubenswrapper[4651]: E1011 04:52:35.869815 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:52:35 crc kubenswrapper[4651]: E1011 04:52:35.870073 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.886480 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.886608 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.886641 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.886667 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.886693 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:35Z","lastTransitionTime":"2025-10-11T04:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.988639 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.989062 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.989228 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.989384 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:35 crc kubenswrapper[4651]: I1011 04:52:35.989528 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:35Z","lastTransitionTime":"2025-10-11T04:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.092047 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.092089 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.092099 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.092115 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.092125 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:36Z","lastTransitionTime":"2025-10-11T04:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.194548 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.194608 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.194624 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.194648 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.194664 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:36Z","lastTransitionTime":"2025-10-11T04:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.297144 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.297213 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.297222 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.297235 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.297242 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:36Z","lastTransitionTime":"2025-10-11T04:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.400434 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.400529 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.400573 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.400597 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.400612 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:36Z","lastTransitionTime":"2025-10-11T04:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.503785 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.503907 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.503931 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.503953 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.503971 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:36Z","lastTransitionTime":"2025-10-11T04:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.607332 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.607451 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.607484 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.607570 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.607597 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:36Z","lastTransitionTime":"2025-10-11T04:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.710990 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.711042 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.711055 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.711072 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.711085 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:36Z","lastTransitionTime":"2025-10-11T04:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.783723 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.783792 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.783843 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.783879 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.783907 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:36Z","lastTransitionTime":"2025-10-11T04:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:36 crc kubenswrapper[4651]: E1011 04:52:36.805087 4651 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a821a5c3-63e0-43db-82e0-e9c6e98ead52\\\",\\\"systemUUID\\\":\\\"f1c4ea71-0c28-43a7-99a4-e27ff72e186a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:36Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.809154 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.809194 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.809204 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.809218 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.809227 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:36Z","lastTransitionTime":"2025-10-11T04:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:36 crc kubenswrapper[4651]: E1011 04:52:36.823339 4651 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a821a5c3-63e0-43db-82e0-e9c6e98ead52\\\",\\\"systemUUID\\\":\\\"f1c4ea71-0c28-43a7-99a4-e27ff72e186a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:36Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.827607 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.827672 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.827694 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.827722 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.827743 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:36Z","lastTransitionTime":"2025-10-11T04:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:36 crc kubenswrapper[4651]: E1011 04:52:36.841266 4651 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a821a5c3-63e0-43db-82e0-e9c6e98ead52\\\",\\\"systemUUID\\\":\\\"f1c4ea71-0c28-43a7-99a4-e27ff72e186a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:36Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.845215 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.845269 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.845286 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.845310 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.845326 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:36Z","lastTransitionTime":"2025-10-11T04:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:36 crc kubenswrapper[4651]: E1011 04:52:36.860918 4651 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a821a5c3-63e0-43db-82e0-e9c6e98ead52\\\",\\\"systemUUID\\\":\\\"f1c4ea71-0c28-43a7-99a4-e27ff72e186a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:36Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.867248 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.867309 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.867323 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.867348 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.867366 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:36Z","lastTransitionTime":"2025-10-11T04:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.868502 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:52:36 crc kubenswrapper[4651]: E1011 04:52:36.868668 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.881944 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Oct 11 04:52:36 crc kubenswrapper[4651]: E1011 04:52:36.882069 4651 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a821a5c3-63e0-43db-82e0-e9c6e98ead52\\\",\\\"systemUUID\\\":\\\"f1c4ea71-0c28-43a7-99a4-e27ff72e186a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:36Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:36 crc kubenswrapper[4651]: E1011 04:52:36.882221 4651 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.884797 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.884898 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.884918 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.884951 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.884971 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:36Z","lastTransitionTime":"2025-10-11T04:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.987247 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.987345 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.987368 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.987399 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:36 crc kubenswrapper[4651]: I1011 04:52:36.987420 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:36Z","lastTransitionTime":"2025-10-11T04:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.090558 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.090613 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.090631 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.090656 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.090675 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:37Z","lastTransitionTime":"2025-10-11T04:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.192478 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.192535 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.192549 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.192570 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.192586 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:37Z","lastTransitionTime":"2025-10-11T04:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.295658 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.295933 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.296023 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.296136 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.296207 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:37Z","lastTransitionTime":"2025-10-11T04:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.398897 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.399236 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.399347 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.399445 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.399522 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:37Z","lastTransitionTime":"2025-10-11T04:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.502141 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.502180 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.502189 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.502231 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.502242 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:37Z","lastTransitionTime":"2025-10-11T04:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.605182 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.605245 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.605264 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.605289 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.605303 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:37Z","lastTransitionTime":"2025-10-11T04:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.707805 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.708090 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.708189 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.708260 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.708318 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:37Z","lastTransitionTime":"2025-10-11T04:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.810957 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.811197 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.811366 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.811518 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.811686 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:37Z","lastTransitionTime":"2025-10-11T04:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.869130 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.869157 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:52:37 crc kubenswrapper[4651]: E1011 04:52:37.869219 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.869130 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:52:37 crc kubenswrapper[4651]: E1011 04:52:37.869284 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:52:37 crc kubenswrapper[4651]: E1011 04:52:37.869519 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.915275 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.915310 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.915318 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.915332 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:37 crc kubenswrapper[4651]: I1011 04:52:37.915341 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:37Z","lastTransitionTime":"2025-10-11T04:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.017858 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.017980 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.018001 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.018026 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.018043 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:38Z","lastTransitionTime":"2025-10-11T04:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.120881 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.120987 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.120997 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.121009 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.121019 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:38Z","lastTransitionTime":"2025-10-11T04:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.222806 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.223064 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.223155 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.223244 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.223318 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:38Z","lastTransitionTime":"2025-10-11T04:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.326965 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.327207 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.327270 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.327329 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.327383 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:38Z","lastTransitionTime":"2025-10-11T04:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.430337 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.430799 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.431018 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.431146 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.431258 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:38Z","lastTransitionTime":"2025-10-11T04:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.535221 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.535276 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.535289 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.535313 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.535328 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:38Z","lastTransitionTime":"2025-10-11T04:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.637669 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.637894 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.637983 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.638106 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.638175 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:38Z","lastTransitionTime":"2025-10-11T04:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.740570 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.740886 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.740986 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.741100 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.741192 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:38Z","lastTransitionTime":"2025-10-11T04:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.843813 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.843873 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.843884 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.843898 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.843908 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:38Z","lastTransitionTime":"2025-10-11T04:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.868707 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:52:38 crc kubenswrapper[4651]: E1011 04:52:38.868896 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.946534 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.946574 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.946586 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.946600 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:38 crc kubenswrapper[4651]: I1011 04:52:38.946611 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:38Z","lastTransitionTime":"2025-10-11T04:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.049353 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.049395 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.049405 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.049420 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.049446 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:39Z","lastTransitionTime":"2025-10-11T04:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.152159 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.152213 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.152226 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.152273 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.152287 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:39Z","lastTransitionTime":"2025-10-11T04:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.254328 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.254366 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.254378 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.254391 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.254401 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:39Z","lastTransitionTime":"2025-10-11T04:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.357143 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.357181 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.357190 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.357204 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.357213 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:39Z","lastTransitionTime":"2025-10-11T04:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.459883 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.459925 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.459934 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.459949 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.459981 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:39Z","lastTransitionTime":"2025-10-11T04:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.562492 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.562550 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.562567 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.562587 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.562601 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:39Z","lastTransitionTime":"2025-10-11T04:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.665328 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.665370 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.665383 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.665402 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.665414 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:39Z","lastTransitionTime":"2025-10-11T04:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.768599 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.768705 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.768734 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.768772 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.768798 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:39Z","lastTransitionTime":"2025-10-11T04:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.869105 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.869149 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:52:39 crc kubenswrapper[4651]: E1011 04:52:39.869284 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.869557 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.870239 4651 scope.go:117] "RemoveContainer" containerID="e0575c5b690a6b170f1e7dbb71dc3d09d4dbb86071a2363d6d7c4c85e2c78320" Oct 11 04:52:39 crc kubenswrapper[4651]: E1011 04:52:39.870623 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:52:39 crc kubenswrapper[4651]: E1011 04:52:39.870868 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.871153 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.871192 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.871204 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.871221 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.871237 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:39Z","lastTransitionTime":"2025-10-11T04:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.888154 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d425a775a5e278ee77231764e58cac3441e04c383eee54f69ba84488ca640eae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or 
is not yet valid: current time 2025-10-11T04:52:39Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.906923 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55e4c9c2d46f67bb0f8ccdb2de27dc82f0280476613b3b30d587ff6f5784e787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:39Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.922607 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e5b8526fed77f2e541d6793dc36a902daebce60d367f28f9efb45f7fabb44fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e278c9a5d14f026733737226a6ffc91202b760e48622d5d0031678981fa5f857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:39Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.942529 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:39Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.961597 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"744aa461-2868-440d-a1b9-6d30c0c50b56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aabba06dd47ad528f830a47cc79ddabb3789d24ebed62a6e8f976df1b156ef1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb996f17546fc77ac1b8ed27428aaa6060822daa0156cf016dbb24ed278403a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ee2612930be6596c0186cc000d80039b52e225fd64102002ed9316a0605521\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6263dc47a8284e9849da1fb15c4788174252f3637665ac6e4b19298ca90c587\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:39Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.977855 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dacbe38-be95-4b56-a204-f87d2e8d6496\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918c77155c7ad4e14a9706e6e36a26cf2c774133b3435468d326b1b8c1f29f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf6183b5381ec04f62d32175471097bd2d2088003063202375b88ccfb9080fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2aaeb6d47f2435f2865c7516d976fecaf6de20b458b5cdcea1cdf59449cdef9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96c869e21e725921f47799368a3327f628bbdd7d7db8b4d0f29bf27b4d04551b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96c869e21e725921f47799368a3327f628bbdd7d7db8b4d0f29bf27b4d04551b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:39Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.978262 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.978572 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.978762 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.979377 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.979760 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:39Z","lastTransitionTime":"2025-10-11T04:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:39 crc kubenswrapper[4651]: I1011 04:52:39.997154 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:39Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.015518 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f12e36a570de649e1df8107ea828cd8e65c18e111de191d3796fc1f3134e43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:40Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.036120 4651 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453b8d5da6858078639895d4d19bb1783eefeeb7c558eec6984f6ebdadd5d8fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T04:52:27Z\\\",\\\"message\\\":\\\"2025-10-11T04:51:42+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_6af57944-ba42-4ed4-a4aa-c5a5d8283647\\\\n2025-10-11T04:51:42+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_6af57944-ba42-4ed4-a4aa-c5a5d8283647 to /host/opt/cni/bin/\\\\n2025-10-11T04:51:42Z [verbose] multus-daemon started\\\\n2025-10-11T04:51:42Z [verbose] Readiness Indicator file check\\\\n2025-10-11T04:52:27Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:40Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.050502 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"499d68ba-8709-4a00-a42b-ccd984f55fa7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0795ced06735c12d7560b7563d12ebbda3afb26aab35978a6f8b42216eeb1cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba40c78d42cfd10414df43e27860b89083b6988bc0f0375c286af81e8a7ef32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba40c78d42cfd10414df43e27860b89083b6988bc0f0375c286af81e8a7ef32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:40Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.062027 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7407b7da9820bbcdac4ce9c49c4520834a466b0db187c5f861972713ba1583f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:40Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.075248 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:40Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.085424 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-tgvv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a551fed8-58fb-48ae-88af-8dc0cb48fc30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nphhq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nphhq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:53Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-tgvv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:40Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.086401 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.086465 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.086475 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.086489 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.086498 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:40Z","lastTransitionTime":"2025-10-11T04:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.102223 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:40Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.116320 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52a0f9e11cf4e8c69d5b356b702ce9df8cbc935daadadf531004a23e5f6dad65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:40Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.137588 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fd0aaae2760a1c934aed54b8e07528f0f78db1149c9e1f8674bec90b61a7078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://788d6a46d740de869fe7cd1d28479f8115a3515c7bd2482e06cbc4dba7b27fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68954082d4369b9b4d95ba1f15e19593dc330b698562be413055b2ba012b5f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eed8335754437c935f07223bf4c4f40f9ab0bbdc9a86b7610f7f6fd55140e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23b3fde15e637081cbf5fad2e55ccea1f0fe63d39ff5f82a60ce05ffeef9a837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f927067aabe4690e99fe50a069afdf68b22d950a8ae141235571ced468c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0575c5b690a6b170f1e7dbb71dc3d09d4dbb86071a2363d6d7c4c85e2c78320\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0575c5b690a6b170f1e7dbb71dc3d09d4dbb86071a2363d6d7c4c85e2c78320\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T04:52:10Z\\\",\\\"message\\\":\\\"andler 8\\\\nI1011 04:52:10.796228 6293 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1011 04:52:10.796255 6293 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1011 04:52:10.796280 6293 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1011 04:52:10.796326 6293 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1011 04:52:10.796340 6293 handler.go:208] Removed *v1.Node event handler 2\\\\nI1011 04:52:10.796359 6293 handler.go:208] Removed *v1.Node event handler 7\\\\nI1011 04:52:10.796378 6293 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1011 04:52:10.796447 6293 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1011 04:52:10.796476 6293 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1011 04:52:10.796536 6293 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1011 04:52:10.796553 6293 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1011 04:52:10.796592 6293 factory.go:656] Stopping watch factory\\\\nI1011 04:52:10.796634 6293 ovnkube.go:599] Stopped ovnkube\\\\nI1011 04:52:10.796638 6293 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1011 04:52:10.796674 6293 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1011 04:52:10.796792 6293 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:52:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6zt9s_openshift-ovn-kubernetes(28e01c08-a461-4f44-a49c-4bf92fd3a2ce)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://add3ed68e0bf59bc569248a3660cfc9a20995641ec65b0bf7133e47c366aad73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:40Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.153632 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a036609457cdb5a74a4ccf65ffa2e502f32994e6e3f1ef8a4fd4b89c5454773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2025-10-11T04:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:40Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.167943 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f5830d7-3c6f-48f0-b103-a228e7f8e448\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c307ee086caacd0a5299325126ff0b4b140aa1e18153b8f66696534543bb069\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e30f3d9db8e89696a8a98ab24ca995088afa72c957e19ca59ace228a23268d9c\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qsgwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:40Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.189676 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.189730 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.189741 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.189763 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.189777 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:40Z","lastTransitionTime":"2025-10-11T04:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.291782 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.291846 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.291861 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.291880 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.291892 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:40Z","lastTransitionTime":"2025-10-11T04:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.335237 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6zt9s_28e01c08-a461-4f44-a49c-4bf92fd3a2ce/ovnkube-controller/2.log" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.339067 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" event={"ID":"28e01c08-a461-4f44-a49c-4bf92fd3a2ce","Type":"ContainerStarted","Data":"bedbf9638749a3e9eca8260d325aaa6064bbb0ce8ea6e1cd6042e0039175c968"} Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.339507 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.351587 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f5830d7-3c6f-48f0-b103-a228e7f8e448\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c307ee086caacd0a5299325126ff0b4b140aa1e18153b8f66696534543bb069\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e30f3d9db8e89696a8a98ab24ca995088afa72c957e19ca59ace228a23268d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qsgwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:40Z is after 2025-08-24T17:21:41Z" Oct 11 
04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.363744 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:40Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.373995 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52a0f9e11cf4e8c69d5b356b702ce9df8cbc935daadadf531004a23e5f6dad65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:40Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.390686 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fd0aaae2760a1c934aed54b8e07528f0f78db1149c9e1f8674bec90b61a7078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://788d6a46d740de869fe7cd1d28479f8115a3515c7bd2482e06cbc4dba7b27fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://68954082d4369b9b4d95ba1f15e19593dc330b698562be413055b2ba012b5f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eed8335754437c935f07223bf4c4f40f9ab0bbdc9a86b7610f7f6fd55140e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23b3fde15e637081cbf5fad2e55ccea1f0fe63d39ff5f82a60ce05ffeef9a837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f927067aabe4690e99fe50a069afdf68b22d950a8ae141235571ced468c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bedbf9638749a3e9eca8260d325aaa6064bbb0ce8ea6e1cd6042e0039175c968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0575c5b690a6b170f1e7dbb71dc3d09d4dbb86071a2363d6d7c4c85e2c78320\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T04:52:10Z\\\",\\\"message\\\":\\\"andler 8\\\\nI1011 04:52:10.796228 6293 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1011 04:52:10.796255 6293 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1011 04:52:10.796280 6293 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1011 04:52:10.796326 6293 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1011 04:52:10.796340 6293 handler.go:208] Removed *v1.Node event handler 2\\\\nI1011 04:52:10.796359 6293 handler.go:208] Removed *v1.Node event handler 7\\\\nI1011 04:52:10.796378 6293 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1011 04:52:10.796447 6293 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1011 04:52:10.796476 6293 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1011 04:52:10.796536 6293 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1011 04:52:10.796553 6293 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1011 04:52:10.796592 6293 factory.go:656] Stopping watch factory\\\\nI1011 04:52:10.796634 6293 ovnkube.go:599] Stopped ovnkube\\\\nI1011 04:52:10.796638 6293 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1011 04:52:10.796674 6293 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1011 04:52:10.796792 6293 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:52:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:52:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://add3ed68e0bf59bc569248a3660cfc9a20995641ec65b0bf7133e47c366aad73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:40Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.394561 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.394607 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.394616 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.394633 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.394646 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:40Z","lastTransitionTime":"2025-10-11T04:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.409204 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a036609457cdb5a74a4ccf65ffa2e502f32994e6e3f1ef8a4fd4b89c5454773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:40Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.426569 4651 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d425a775a5e278ee77231764e58cac3441e04c383eee54f69ba84488ca640eae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:40Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.443668 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55e4c9c2d46f67bb0f8ccdb2de27dc82f0280476613b3b30d587ff6f5784e787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:40Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.460354 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e5b8526fed77f2e541d6793dc36a902daebce60d367f28f9efb45f7fabb44fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e278c9a5d14f026733737226a6ffc91202b760e48622d5d0031678981fa5f857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:40Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.472527 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:40Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.483483 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f12e36a570de649e1df8107ea828cd8e65c18e111de191d3796fc1f3134e43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:40Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.497423 4651 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453b8d5da6858078639895d4d19bb1783eefeeb7c558eec6984f6ebdadd5d8fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T04:52:27Z\\\",\\\"message\\\":\\\"2025-10-11T04:51:42+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_6af57944-ba42-4ed4-a4aa-c5a5d8283647\\\\n2025-10-11T04:51:42+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_6af57944-ba42-4ed4-a4aa-c5a5d8283647 to /host/opt/cni/bin/\\\\n2025-10-11T04:51:42Z [verbose] multus-daemon started\\\\n2025-10-11T04:51:42Z [verbose] Readiness Indicator file check\\\\n2025-10-11T04:52:27Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:40Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.505443 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.505482 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.505493 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.505510 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.505521 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:40Z","lastTransitionTime":"2025-10-11T04:52:40Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.511236 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-
11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:40Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.524898 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"744aa461-2868-440d-a1b9-6d30c0c50b56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aabba06dd47ad528f830a47cc79ddabb3789d24ebed62a6e8f976df1b156ef1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb996f17546fc77ac1b8ed27428aaa6060822daa0156cf016dbb24ed278403a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ee2612930be6596c0186cc000d80039b52e225fd64102002ed9316a0605521\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6263dc47a8284e9849da1fb15c4788174252f3637665ac6e4b19298ca90c587\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:40Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.536722 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dacbe38-be95-4b56-a204-f87d2e8d6496\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918c77155c7ad4e14a9706e6e36a26cf2c774133b3435468d326b1b8c1f29f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf6183b5381ec04f62d32175471097bd2d2088003063202375b88ccfb9080fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2aaeb6d47f2435f2865c7516d976fecaf6de20b458b5cdcea1cdf59449cdef9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96c869e21e725921f47799368a3327f628bbdd7d7db8b4d0f29bf27b4d04551b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96c869e21e725921f47799368a3327f628bbdd7d7db8b4d0f29bf27b4d04551b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:40Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.550759 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"499d68ba-8709-4a00-a42b-ccd984f55fa7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0795ced06735c12d7560b7563d12ebbda3afb26aab35978a6f8b42216eeb1cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba40c78d42cfd10414df43e27860b89083b6988bc0f0375c286af81e8a7ef32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba40c78d42cfd10414df43e27860b89083b6988bc0f0375c286af81e8a7ef32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:40Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.563341 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7407b7da9820bbcdac4ce9c49c4520834a466b0db187c5f861972713ba1583f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:40Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.579745 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:40Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.591630 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-tgvv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a551fed8-58fb-48ae-88af-8dc0cb48fc30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nphhq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nphhq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:53Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-tgvv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:40Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.608766 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.608843 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.608855 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.608875 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.608888 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:40Z","lastTransitionTime":"2025-10-11T04:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.711061 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.711102 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.711111 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.711126 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.711135 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:40Z","lastTransitionTime":"2025-10-11T04:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.814951 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.815014 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.815025 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.815044 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.815056 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:40Z","lastTransitionTime":"2025-10-11T04:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.869440 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:52:40 crc kubenswrapper[4651]: E1011 04:52:40.869597 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.918339 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.918409 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.918424 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.918444 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:40 crc kubenswrapper[4651]: I1011 04:52:40.918458 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:40Z","lastTransitionTime":"2025-10-11T04:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.021403 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.021459 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.021469 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.021490 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.021510 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:41Z","lastTransitionTime":"2025-10-11T04:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.124051 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.124183 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.124203 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.124228 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.124245 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:41Z","lastTransitionTime":"2025-10-11T04:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.226877 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.226935 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.226952 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.226975 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.226992 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:41Z","lastTransitionTime":"2025-10-11T04:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.329795 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.329861 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.329875 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.329893 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.329904 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:41Z","lastTransitionTime":"2025-10-11T04:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.342758 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6zt9s_28e01c08-a461-4f44-a49c-4bf92fd3a2ce/ovnkube-controller/3.log" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.343870 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6zt9s_28e01c08-a461-4f44-a49c-4bf92fd3a2ce/ovnkube-controller/2.log" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.346912 4651 generic.go:334] "Generic (PLEG): container finished" podID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerID="bedbf9638749a3e9eca8260d325aaa6064bbb0ce8ea6e1cd6042e0039175c968" exitCode=1 Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.347041 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" event={"ID":"28e01c08-a461-4f44-a49c-4bf92fd3a2ce","Type":"ContainerDied","Data":"bedbf9638749a3e9eca8260d325aaa6064bbb0ce8ea6e1cd6042e0039175c968"} Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.347637 4651 scope.go:117] "RemoveContainer" containerID="bedbf9638749a3e9eca8260d325aaa6064bbb0ce8ea6e1cd6042e0039175c968" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.347847 4651 scope.go:117] "RemoveContainer" containerID="e0575c5b690a6b170f1e7dbb71dc3d09d4dbb86071a2363d6d7c4c85e2c78320" Oct 11 04:52:41 crc kubenswrapper[4651]: E1011 04:52:41.348108 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-6zt9s_openshift-ovn-kubernetes(28e01c08-a461-4f44-a49c-4bf92fd3a2ce)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.367849 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:41Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.385455 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52a0f9e11cf4e8c69d5b356b702ce9df8cbc935daadadf531004a23e5f6dad65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-11T04:52:41Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.411507 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fd0aaae2760a1c934aed54b8e07528f0f78db1149c9e1f8674bec90b61a7078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://788d6a46d740de869fe7cd1d28479f8115a3515c7bd2482e06cbc4dba7b27fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\
"containerID\\\":\\\"cri-o://68954082d4369b9b4d95ba1f15e19593dc330b698562be413055b2ba012b5f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eed8335754437c935f07223bf4c4f40f9ab0bbdc9a86b7610f7f6fd55140e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23b3fde15e637081cbf5fad2e55ccea1f0fe63d39ff5f82a60ce05ffeef9a837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f927067aabe4690e99fe50a069afdf68b22d950a8ae141235571ced468c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bedbf9638749a3e9eca8260d325aaa6064bbb0ce8ea6e1cd6042e0039175c968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0575c5b690a6b170f1e7dbb71dc3d09d4dbb86071a2363d6d7c4c85e2c78320\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T04:52:10Z\\\",\\\"message\\\":\\\"andler 8\\\\nI1011 04:52:10.796228 6293 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1011 04:52:10.796255 6293 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1011 04:52:10.796280 6293 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1011 04:52:10.796326 6293 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1011 04:52:10.796340 6293 handler.go:208] Removed *v1.Node event handler 2\\\\nI1011 04:52:10.796359 6293 handler.go:208] Removed *v1.Node event handler 7\\\\nI1011 04:52:10.796378 6293 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1011 04:52:10.796447 6293 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1011 04:52:10.796476 6293 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1011 04:52:10.796536 6293 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1011 04:52:10.796553 6293 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1011 04:52:10.796592 6293 factory.go:656] Stopping watch factory\\\\nI1011 04:52:10.796634 6293 ovnkube.go:599] Stopped ovnkube\\\\nI1011 04:52:10.796638 6293 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1011 04:52:10.796674 6293 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1011 04:52:10.796792 6293 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:52:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bedbf9638749a3e9eca8260d325aaa6064bbb0ce8ea6e1cd6042e0039175c968\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T04:52:40Z\\\",\\\"message\\\":\\\"ice k8s.ovn.org/owner:openshift-console/console]} name:Service_openshift-console/console_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.194:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {d7d7b270-1480-47f8-bdf9-690dbab310cb}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1011 04:52:40.811464 6694 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-network-console/networking-console-plugin]} name:Service_openshift-network-console/networking-console-plugin_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.246:9443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ab0b1d51-5ec6-479b-8881-93dfa8d30337}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1011 04:52:40.811537 6694 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:52:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://add3ed68e0bf59bc569248a3660cfc9a20995641ec65b0bf7133e47c366aad73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:41Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.434361 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.434398 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.434410 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.434427 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.434440 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:41Z","lastTransitionTime":"2025-10-11T04:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.437791 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a036609457cdb5a74a4ccf65ffa2e502f32994e6e3f1ef8a4fd4b89c5454773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:41Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.452630 4651 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f5830d7-3c6f-48f0-b103-a228e7f8e448\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c307ee086caacd0a5299325126ff0b4b140aa1e18153b8f66696534543bb069\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e30f3d9db8e89696a8a98ab24ca995088afa72c957e19ca59ace228a23268d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qsgwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-11T04:52:41Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.471901 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d425a775a5e278ee77231764e58cac3441e04c383eee54f69ba84488ca640eae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:41Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.488845 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55e4c9c2d46f67bb0f8ccdb2de27dc82f0280476613b3b30d587ff6f5784e787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:41Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.503562 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e5b8526fed77f2e541d6793dc36a902daebce60d367f28f9efb45f7fabb44fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e278c9a5d14f026733737226a6ffc91202b760e48622d5d0031678981fa5f857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:41Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.524963 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:41Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.537481 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.537549 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.537559 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.537581 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.537592 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:41Z","lastTransitionTime":"2025-10-11T04:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.541649 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"744aa461-2868-440d-a1b9-6d30c0c50b56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aabba06dd47ad528f830a47cc79ddabb3789d24ebed62a6e8f976df1b156ef1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb996f17546fc77ac1b8ed27428aaa6060822daa0156cf016dbb24ed278403a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ee2612930be6596c0186cc000d80039b52e225fd64102002ed9316a0605521\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6263dc47a8284e9849da1fb15c4788174252f3637665ac6e4b19298ca90c587\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:41Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.562332 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dacbe38-be95-4b56-a204-f87d2e8d6496\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918c77155c7ad4e14a9706e6e36a26cf2c774133b3435468d326b1b8c1f29f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf6183b5381ec04f62d32175471097bd2d2088003063202375b88ccfb9080fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2aaeb6d47f2435f2865c7516d976fecaf6de20b458b5cdcea1cdf59449cdef9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96c869e21e725921f47799368a3327f628bbdd7d7db8b4d0f29bf27b4d04551b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96c869e21e725921f47799368a3327f628bbdd7d7db8b4d0f29bf27b4d04551b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:41Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.581770 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:41Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.596041 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f12e36a570de649e1df8107ea828cd8e65c18e111de191d3796fc1f3134e43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:41Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.618167 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453b8d5da6858078639895d4d19bb1783eefeeb7c558eec6984f6ebdadd5d8fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T04:52:27Z\\\",\\\"message\\\":\\\"2025-10-11T04:51:42+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_6af57944-ba42-4ed4-a4aa-c5a5d8283647\\\\n2025-10-11T04:51:42+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_6af57944-ba42-4ed4-a4aa-c5a5d8283647 to /host/opt/cni/bin/\\\\n2025-10-11T04:51:42Z [verbose] multus-daemon started\\\\n2025-10-11T04:51:42Z [verbose] Readiness Indicator file check\\\\n2025-10-11T04:52:27Z [error] have you checked that your default network is ready? 
still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:41Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.634461 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"499d68ba-8709-4a00-a42b-ccd984f55fa7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0795ced06735c12d7560b7563d12ebbda3afb26aab35978a6f8b42216eeb1cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba40c78d42cfd10414df43e27860b89083b6988bc0f0375c286af81e8a7ef32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba40c78d42cfd10414df43e27860b89083b6988bc0f0375c286af81e8a7ef32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:41Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.640618 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.640703 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.640724 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.640754 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.640775 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:41Z","lastTransitionTime":"2025-10-11T04:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.652545 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7407b7da9820bbcdac4ce9c49c4520834a466b0db187c5f861972713ba1583f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-11T04:52:41Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.673779 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:41Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.695982 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-tgvv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a551fed8-58fb-48ae-88af-8dc0cb48fc30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nphhq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nphhq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:53Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-tgvv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:41Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.743644 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.743702 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.743720 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.743741 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.743754 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:41Z","lastTransitionTime":"2025-10-11T04:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.848063 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.848152 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.848173 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.848205 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.848228 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:41Z","lastTransitionTime":"2025-10-11T04:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.869044 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.869073 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.869240 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:52:41 crc kubenswrapper[4651]: E1011 04:52:41.869405 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:52:41 crc kubenswrapper[4651]: E1011 04:52:41.869593 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:52:41 crc kubenswrapper[4651]: E1011 04:52:41.869726 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.951282 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.951616 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.951805 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.952106 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:41 crc kubenswrapper[4651]: I1011 04:52:41.952348 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:41Z","lastTransitionTime":"2025-10-11T04:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.058064 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.058139 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.058160 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.058198 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.058226 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:42Z","lastTransitionTime":"2025-10-11T04:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.161737 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.161798 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.161862 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.161889 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.161903 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:42Z","lastTransitionTime":"2025-10-11T04:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.265718 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.265776 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.265798 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.265863 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.265901 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:42Z","lastTransitionTime":"2025-10-11T04:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.352417 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6zt9s_28e01c08-a461-4f44-a49c-4bf92fd3a2ce/ovnkube-controller/3.log" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.355821 4651 scope.go:117] "RemoveContainer" containerID="bedbf9638749a3e9eca8260d325aaa6064bbb0ce8ea6e1cd6042e0039175c968" Oct 11 04:52:42 crc kubenswrapper[4651]: E1011 04:52:42.355983 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-6zt9s_openshift-ovn-kubernetes(28e01c08-a461-4f44-a49c-4bf92fd3a2ce)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.368218 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.368257 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.368268 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.368287 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.368300 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:42Z","lastTransitionTime":"2025-10-11T04:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.373307 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d425a775a5e278ee77231764e58cac3441e04c383eee54f69ba84488ca640eae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:42Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.387594 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55e4c9c2d46f67bb0f8ccdb2de27dc82f0280476613b3b30d587ff6f5784e787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:42Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.403279 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e5b8526fed77f2e541d6793dc36a902daebce60d367f28f9efb45f7fabb44fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e278c9a5d14f026733737226a6ffc91202b760e48622d5d0031678981fa5f857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:42Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.444212 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:42Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.465664 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"519a1ae1-e964-48b0-8b61-835146df28c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f12e36a570de649e1df8107ea828cd8e65c18e111de191d3796fc1f3134e43c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wkk99\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-78jnv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:42Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.470303 4651 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.470336 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.470348 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.470369 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.470380 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:42Z","lastTransitionTime":"2025-10-11T04:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.485285 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wz4hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fbfdd781-994b-49b4-9c8e-edc0ea4145d1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://453b8d5da6858078639895d4d19bb1783eefeeb7c558eec6984f6ebdadd5d8fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T04:52:27Z\\\",\\\"message\\\":\\\"2025-10-11T04:51:42+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_6af57944-ba42-4ed4-a4aa-c5a5d8283647\\\\n2025-10-11T04:51:42+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_6af57944-ba42-4ed4-a4aa-c5a5d8283647 to /host/opt/cni/bin/\\\\n2025-10-11T04:51:42Z [verbose] multus-daemon started\\\\n2025-10-11T04:51:42Z [verbose] Readiness Indicator file check\\\\n2025-10-11T04:52:27Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-c96nx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wz4hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:42Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.504470 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
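
The multus termination message embedded above explains its earlier exit-code-1 restart: kube-multus polls for a readiness-indicator file that the default network (OVN-Kubernetes here) writes once it is up, and gives up when the poll times out. A stdlib sketch of that wait loop (the 1s interval and 45s timeout are illustrative, not multus's exact settings; the indicator path is from the log):

    // readywait.go - sketch of a readiness-indicator wait: poll for a
    // file until it appears or the timeout expires.
    package main

    import (
    	"fmt"
    	"os"
    	"time"
    )

    func waitForFile(path string, interval, timeout time.Duration) error {
    	deadline := time.Now().Add(timeout)
    	for {
    		if _, err := os.Stat(path); err == nil {
    			return nil // indicator written; default network is ready
    		}
    		if time.Now().After(deadline) {
    			return fmt.Errorf("still waiting for readinessindicatorfile @ %s: timed out waiting for the condition", path)
    		}
    		time.Sleep(interval)
    	}
    }

    func main() {
    	indicator := "/host/run/multus/cni/net.d/10-ovn-kubernetes.conf"
    	if err := waitForFile(indicator, time.Second, 45*time.Second); err != nil {
    		fmt.Fprintln(os.Stderr, err)
    		os.Exit(1) // mirrors the exitCode:1 recorded in the pod status above
    	}
    }
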
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47b36edf-f918-4105-bb64-66bb71a4b8c4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9521c3af1580aa2a62fc760916de9dc4abdc437700567a6ae5df3a96da2e8942\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6fdcb798e11fca3b7b427f5ef0ac955a893709ffedaa472aaab6c219994e940\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff57b9678d77f617cfc792258091526c2a2f8125c240abfe0b15f693abc948eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11e12782179b96dac12bfdd3a25526e93f18da255e2d2fb0d7522e6d9c373b4e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e14956f18479aa61fa0755b9c6c4f26ba7ac18f3023e177a76b9a59101b37c1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T04:51:33Z\\\",\\\"message\\\":\\\"W1011 04:51:22.963111 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1011 04:51:22.963696 1 crypto.go:601] Generating new CA for check-endpoints-signer@1760158282 cert, and key in /tmp/serving-cert-3362973750/serving-signer.crt, /tmp/serving-cert-3362973750/serving-signer.key\\\\nI1011 04:51:23.197526 1 observer_polling.go:159] Starting file observer\\\\nW1011 04:51:23.200452 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1011 04:51:23.200572 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 04:51:23.202397 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3362973750/tls.crt::/tmp/serving-cert-3362973750/tls.key\\\\\\\"\\\\nF1011 04:51:33.422520 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8633747abf191f040cb14f5a76529487743229338070aeca597fc93dae8b3f1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://55c5a1cac9b1045ca931771e5a096d29e8d986a4d9baae8d779d94f2a30b6cd3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:42Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.516268 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"744aa461-2868-440d-a1b9-6d30c0c50b56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aabba06dd47ad528f830a47cc79ddabb3789d24ebed62a6e8f976df1b156ef1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb996f17546fc77ac1b8ed27428aaa6060822daa0156cf016dbb24ed278403a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ee2612930be6596c0186cc000d80039b52e225fd64102002ed9316a0605521\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6263dc47a8284e9849da1fb15c4788174252f3637665ac6e4b19298ca90c587\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:42Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.527623 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dacbe38-be95-4b56-a204-f87d2e8d6496\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918c77155c7ad4e14a9706e6e36a26cf2c774133b3435468d326b1b8c1f29f45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf6183b5381ec04f62d32175471097bd2d2088003063202375b88ccfb9080fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2aaeb6d47f2435f2865c7516d976fecaf6de20b458b5cdcea1cdf59449cdef9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96c869e21e725921f47799368a3327f628bbdd7d7db8b4d0f29bf27b4d04551b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96c869e21e725921f47799368a3327f628bbdd7d7db8b4d0f29bf27b4d04551b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:42Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.538284 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"499d68ba-8709-4a00-a42b-ccd984f55fa7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0795ced06735c12d7560b7563d12ebbda3afb26aab35978a6f8b42216eeb1cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dba40c78d42cfd10414df43e27860b89083b6988bc0f0375c286af81e8a7ef32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-de
v@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dba40c78d42cfd10414df43e27860b89083b6988bc0f0375c286af81e8a7ef32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:19Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:42Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.546924 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-phsgk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e8fb74d-013d-4103-b029-e8416d079dcf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7407b7da9820bbcdac4ce9c49c4520834a466b0db187c5f861972713ba1583f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4ftf8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-phsgk\": 
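
The patch bodies in these records are strategic-merge patches (the $setElementOrder/conditions directive pins the ordering of the conditions list during the merge), but klog's quoting makes them hard to read. A small reading aid, assuming you paste one escaped body into `escaped` (unescape-then-indent is an editor-side convenience, not a kubelet facility; text copied from this dump may need the replacement applied more than once depending on nesting depth):

    // patchpretty.go - reading aid for the escaped patch payloads in these
    // logs: undo one level of escaping, then pretty-print the JSON.
    package main

    import (
    	"bytes"
    	"encoding/json"
    	"fmt"
    	"os"
    	"strings"
    )

    func main() {
    	// Shortened example shape; paste a full escaped patch body here.
    	escaped := `{\"metadata\":{\"uid\":\"9e8fb74d-013d-4103-b029-e8416d079dcf\"},\"status\":{\"phase\":\"Running\"}}`
    	raw := strings.ReplaceAll(escaped, `\"`, `"`)
    	var out bytes.Buffer
    	if err := json.Indent(&out, []byte(raw), "", "  "); err != nil {
    		fmt.Fprintln(os.Stderr, "not valid JSON after unescaping:", err)
    		os.Exit(1)
    	}
    	fmt.Println(out.String())
    }
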
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:42Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.556611 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:42Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.569004 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-tgvv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a551fed8-58fb-48ae-88af-8dc0cb48fc30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nphhq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nphhq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:53Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-tgvv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:42Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.572203 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.572248 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.572258 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.572277 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.572288 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:42Z","lastTransitionTime":"2025-10-11T04:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.580926 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f5830d7-3c6f-48f0-b103-a228e7f8e448\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c307ee086caacd0a5299325126ff0b4b140aa1e18153b8f66696534543bb069\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e30f3d9db8e89696a8a98ab24ca995088afa72c957e19ca59ace228a23268d9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\
\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pbjvz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qsgwc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:42Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.593014 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:42Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.603412 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kwhmr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbcac3cb-b774-47ec-a86c-b22191d14d99\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52a0f9e11cf4e8c69d5b356b702ce9df8cbc935daadadf531004a23e5f6dad65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7n87\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:39Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kwhmr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-11T04:52:42Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.627477 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0fd0aaae2760a1c934aed54b8e07528f0f78db1149c9e1f8674bec90b61a7078\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://788d6a46d740de869fe7cd1d28479f8115a3515c7bd2482e06cbc4dba7b27fac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\
"containerID\\\":\\\"cri-o://68954082d4369b9b4d95ba1f15e19593dc330b698562be413055b2ba012b5f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3eed8335754437c935f07223bf4c4f40f9ab0bbdc9a86b7610f7f6fd55140e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23b3fde15e637081cbf5fad2e55ccea1f0fe63d39ff5f82a60ce05ffeef9a837\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f927067aabe4690e99fe50a069afdf68b22d950a8ae141235571ced468c44e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bedbf9638749a3e9eca8260d325aaa6064bbb0ce8ea6e1cd6042e0039175c968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bedbf9638749a3e9eca8260d325aaa6064bbb0ce8ea6e1cd6042e0039175c968\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T04:52:40Z\\\",\\\"message\\\":\\\"ice k8s.ovn.org/owner:openshift-console/console]} name:Service_openshift-console/console_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.194:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {d7d7b270-1480-47f8-bdf9-690dbab310cb}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1011 04:52:40.811464 6694 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-network-console/networking-console-plugin]} name:Service_openshift-network-console/networking-console-plugin_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.246:9443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ab0b1d51-5ec6-479b-8881-93dfa8d30337}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1011 04:52:40.811537 6694 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T04:52:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-6zt9s_openshift-ovn-kubernetes(28e01c08-a461-4f44-a49c-4bf92fd3a2ce)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://add3ed68e0bf59bc569248a3660cfc9a20995641ec65b0bf7133e47c366aad73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9tctk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6zt9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:42Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.643773 4651 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"215170a8-84a9-4e15-9b0e-c1200c680f30\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T04:51:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a036609457cdb5a74a4ccf65ffa2e502f32994e6e3f1ef8a4fd4b89c5454773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T04:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c80b1c5385cb92e140c43bc982ee7cda6406eac9dfe8d9449eaf45332ccc506\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1575fc8fd03d4047a3ed1c0c9df99b77e40296b876d2913a790128f39733d45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://626e511c110034060639be8565505dc122e834fc63a76d9c4e21dd1dbd5d5a62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9d55cd3f36dcad738ffd05b10e3f665c6bbb949c25365340e70b3c4b53e4175\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27f664812354e7e2ea5feaa075c7f9f72262ddf0a7959196f7a7b14be5af1924\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3b9eccb883d262047b87eae2358f6d27cbd8df5b6917125a8a6259b2eb3433a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T04:51:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T04:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-52sdp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T04:51:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-pgwvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:42Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.674541 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.674588 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:42 crc 
kubenswrapper[4651]: I1011 04:52:42.674605 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.674627 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.674644 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:42Z","lastTransitionTime":"2025-10-11T04:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.777023 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.777057 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.777067 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.777080 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.777091 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:42Z","lastTransitionTime":"2025-10-11T04:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.869112 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:52:42 crc kubenswrapper[4651]: E1011 04:52:42.869245 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.879898 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.879956 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.879982 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.880007 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.880024 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:42Z","lastTransitionTime":"2025-10-11T04:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.983079 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.983128 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.983142 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.983160 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:42 crc kubenswrapper[4651]: I1011 04:52:42.983172 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:42Z","lastTransitionTime":"2025-10-11T04:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.089788 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.089916 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.089943 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.089978 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.090015 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:43Z","lastTransitionTime":"2025-10-11T04:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.193880 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.193941 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.193960 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.193984 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.194002 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:43Z","lastTransitionTime":"2025-10-11T04:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.296853 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.296926 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.296943 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.297388 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.297812 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:43Z","lastTransitionTime":"2025-10-11T04:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.401164 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.401211 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.401227 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.401253 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.401274 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:43Z","lastTransitionTime":"2025-10-11T04:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.504639 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.504700 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.504717 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.504741 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.504759 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:43Z","lastTransitionTime":"2025-10-11T04:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.607960 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.608020 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.608037 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.608063 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.608138 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:43Z","lastTransitionTime":"2025-10-11T04:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.660908 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:52:43 crc kubenswrapper[4651]: E1011 04:52:43.661018 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:47.660996157 +0000 UTC m=+148.557228963 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.661075 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.661111 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:52:43 crc kubenswrapper[4651]: E1011 04:52:43.661191 4651 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 11 04:52:43 crc kubenswrapper[4651]: E1011 04:52:43.661215 4651 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 11 04:52:43 crc kubenswrapper[4651]: E1011 04:52:43.661236 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-11 04:53:47.661227282 +0000 UTC m=+148.557460098 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 11 04:52:43 crc kubenswrapper[4651]: E1011 04:52:43.661255 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-11 04:53:47.661243873 +0000 UTC m=+148.557476679 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.711051 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.711099 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.711116 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.711136 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.711151 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:43Z","lastTransitionTime":"2025-10-11T04:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.761919 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.761972 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:52:43 crc kubenswrapper[4651]: E1011 04:52:43.762121 4651 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 11 04:52:43 crc kubenswrapper[4651]: E1011 04:52:43.762135 4651 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 11 04:52:43 crc kubenswrapper[4651]: E1011 04:52:43.762148 4651 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 04:52:43 crc kubenswrapper[4651]: E1011 04:52:43.762153 4651 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 11 04:52:43 crc kubenswrapper[4651]: E1011 04:52:43.762186 4651 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 11 04:52:43 crc kubenswrapper[4651]: E1011 04:52:43.762201 4651 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 04:52:43 crc kubenswrapper[4651]: E1011 04:52:43.762189 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-11 04:53:47.762176907 +0000 UTC m=+148.658409703 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 04:52:43 crc kubenswrapper[4651]: E1011 04:52:43.762272 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-11 04:53:47.762254159 +0000 UTC m=+148.658486975 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.813859 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.814166 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.814175 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.814190 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.814199 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:43Z","lastTransitionTime":"2025-10-11T04:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.870071 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.870162 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.870280 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:52:43 crc kubenswrapper[4651]: E1011 04:52:43.870281 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:52:43 crc kubenswrapper[4651]: E1011 04:52:43.870455 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:52:43 crc kubenswrapper[4651]: E1011 04:52:43.870531 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.916490 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.916542 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.916559 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.916578 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:43 crc kubenswrapper[4651]: I1011 04:52:43.916595 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:43Z","lastTransitionTime":"2025-10-11T04:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:44 crc kubenswrapper[4651]: I1011 04:52:44.019520 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:44 crc kubenswrapper[4651]: I1011 04:52:44.019548 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:44 crc kubenswrapper[4651]: I1011 04:52:44.019559 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:44 crc kubenswrapper[4651]: I1011 04:52:44.019574 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:44 crc kubenswrapper[4651]: I1011 04:52:44.019583 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:44Z","lastTransitionTime":"2025-10-11T04:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 11 04:52:44 crc kubenswrapper[4651]: I1011 04:52:44.869246 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 11 04:52:44 crc kubenswrapper[4651]: E1011 04:52:44.869364 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:52:44 crc kubenswrapper[4651]: I1011 04:52:44.946766 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:44 crc kubenswrapper[4651]: I1011 04:52:44.946810 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:44 crc kubenswrapper[4651]: I1011 04:52:44.946846 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:44 crc kubenswrapper[4651]: I1011 04:52:44.946865 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:44 crc kubenswrapper[4651]: I1011 04:52:44.946879 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:44Z","lastTransitionTime":"2025-10-11T04:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:45 crc kubenswrapper[4651]: I1011 04:52:45.048600 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:45 crc kubenswrapper[4651]: I1011 04:52:45.048661 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:45 crc kubenswrapper[4651]: I1011 04:52:45.048677 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:45 crc kubenswrapper[4651]: I1011 04:52:45.048754 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:45 crc kubenswrapper[4651]: I1011 04:52:45.048772 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:45Z","lastTransitionTime":"2025-10-11T04:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 11 04:52:45 crc kubenswrapper[4651]: I1011 04:52:45.868577 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 11 04:52:45 crc kubenswrapper[4651]: I1011 04:52:45.868626 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8"
Oct 11 04:52:45 crc kubenswrapper[4651]: E1011 04:52:45.868743 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 11 04:52:45 crc kubenswrapper[4651]: E1011 04:52:45.868998 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30"
Oct 11 04:52:45 crc kubenswrapper[4651]: I1011 04:52:45.869561 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 11 04:52:45 crc kubenswrapper[4651]: E1011 04:52:45.869732 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:52:45 crc kubenswrapper[4651]: I1011 04:52:45.871419 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:45 crc kubenswrapper[4651]: I1011 04:52:45.871447 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:45 crc kubenswrapper[4651]: I1011 04:52:45.871457 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:45 crc kubenswrapper[4651]: I1011 04:52:45.871470 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:45 crc kubenswrapper[4651]: I1011 04:52:45.871481 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:45Z","lastTransitionTime":"2025-10-11T04:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:45 crc kubenswrapper[4651]: I1011 04:52:45.974885 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:45 crc kubenswrapper[4651]: I1011 04:52:45.974947 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:45 crc kubenswrapper[4651]: I1011 04:52:45.974964 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:45 crc kubenswrapper[4651]: I1011 04:52:45.974991 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:45 crc kubenswrapper[4651]: I1011 04:52:45.975008 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:45Z","lastTransitionTime":"2025-10-11T04:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 11 04:52:46 crc kubenswrapper[4651]: I1011 04:52:46.869383 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 11 04:52:46 crc kubenswrapper[4651]: E1011 04:52:46.869563 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:52:46 crc kubenswrapper[4651]: I1011 04:52:46.899725 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:46 crc kubenswrapper[4651]: I1011 04:52:46.899774 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:46 crc kubenswrapper[4651]: I1011 04:52:46.899790 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:46 crc kubenswrapper[4651]: I1011 04:52:46.899814 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:46 crc kubenswrapper[4651]: I1011 04:52:46.899867 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:46Z","lastTransitionTime":"2025-10-11T04:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.003463 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.003542 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.003560 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.003586 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.003606 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:47Z","lastTransitionTime":"2025-10-11T04:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.043346 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.043419 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.043440 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.043474 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.043501 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:47Z","lastTransitionTime":"2025-10-11T04:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:52:47 crc kubenswrapper[4651]: E1011 04:52:47.067345 4651 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a821a5c3-63e0-43db-82e0-e9c6e98ead52\\\",\\\"systemUUID\\\":\\\"f1c4ea71-0c28-43a7-99a4-e27ff72e186a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:47Z is after 2025-08-24T17:21:41Z"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.071922 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.071971 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
event="NodeHasNoDiskPressure" Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.071986 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.072004 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.072015 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:47Z","lastTransitionTime":"2025-10-11T04:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:47 crc kubenswrapper[4651]: E1011 04:52:47.089468 4651 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T04:52:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T04:52:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a821a5c3-63e0-43db-82e0-e9c6e98ead52\\\",\\\"systemUUID\\\":\\\"f1c4ea71-0c28-43a7-99a4-e27ff72e186a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:47Z is after 2025-08-24T17:21:41Z" Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.094102 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.094170 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.094194 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.094230 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.094253 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:47Z","lastTransitionTime":"2025-10-11T04:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:52:47 crc kubenswrapper[4651]: E1011 04:52:47.115615 4651 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{…}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:47Z is after 2025-08-24T17:21:41Z"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.120223 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.120283 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.120302 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.120355 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.120372 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:47Z","lastTransitionTime":"2025-10-11T04:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:52:47 crc kubenswrapper[4651]: E1011 04:52:47.137810 4651 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{…}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:47Z is after 2025-08-24T17:21:41Z"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.142797 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.142885 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.142902 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.142924 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.142939 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:47Z","lastTransitionTime":"2025-10-11T04:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:52:47 crc kubenswrapper[4651]: E1011 04:52:47.159690 4651 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{…}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T04:52:47Z is after 2025-08-24T17:21:41Z"
Oct 11 04:52:47 crc kubenswrapper[4651]: E1011 04:52:47.159886 4651 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.162151 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
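
The retries above all fail identically, and the sequence ends with the kubelet giving up: the node-identity webhook at 127.0.0.1:9743 presents a serving certificate that expired on 2025-08-24T17:21:41Z, roughly seven weeks before the node's clock reading of 2025-10-11, so no status patch can land. Below is a minimal Go sketch, not part of the log, of a probe that confirms this from the node; the endpoint and port are taken from the error text, and InsecureSkipVerify is deliberate because the goal is to read the leaf certificate's validity window, not to trust the connection.

// certprobe.go: print the validity window of the certificate served on the
// webhook port named in the kubelet error above.
package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		InsecureSkipVerify: true, // inspecting the certificate, not trusting it
	})
	if err != nil {
		log.Fatalf("dial webhook: %v", err)
	}
	defer conn.Close()

	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Printf("subject:   %s\n", cert.Subject)
	fmt.Printf("notBefore: %s\n", cert.NotBefore.Format(time.RFC3339))
	fmt.Printf("notAfter:  %s\n", cert.NotAfter.Format(time.RFC3339))
	if time.Now().After(cert.NotAfter) {
		fmt.Println("expired: matches the kubelet's x509 error")
	}
}

If notAfter prints in the past, the x509 failure is literal, and retrying the patch (as the kubelet does until "update node status exceeds retry count") cannot help until the certificate is rotated.
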
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.162225 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.162243 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.162271 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.162289 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:47Z","lastTransitionTime":"2025-10-11T04:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.264872 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.264938 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.264950 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.264994 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.265006 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:47Z","lastTransitionTime":"2025-10-11T04:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.368152 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.368191 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.368202 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.368239 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.368249 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:47Z","lastTransitionTime":"2025-10-11T04:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.475405 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.475549 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.475694 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.475730 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.475757 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:47Z","lastTransitionTime":"2025-10-11T04:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.579043 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.579092 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.579107 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.579125 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.579139 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:47Z","lastTransitionTime":"2025-10-11T04:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.682042 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.682121 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.682144 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.682173 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.682199 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:47Z","lastTransitionTime":"2025-10-11T04:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.784556 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.784585 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.784593 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.784607 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.784615 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:47Z","lastTransitionTime":"2025-10-11T04:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.869584 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.869627 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8"
Oct 11 04:52:47 crc kubenswrapper[4651]: E1011 04:52:47.869760 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.869810 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 11 04:52:47 crc kubenswrapper[4651]: E1011 04:52:47.870057 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30"
Oct 11 04:52:47 crc kubenswrapper[4651]: E1011 04:52:47.870250 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
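
The three pod sync failures above share a single precondition: the runtime reports NetworkReady=false because nothing has yet written a CNI configuration into /etc/kubernetes/cni/net.d/ (on this cluster that is the network provider's job, which is exactly what the "Has your network provider started?" hint is asking about). Below is a minimal Go sketch, not part of the log, of the directory check the error message implies; the path is taken from the error text, while the .conf/.conflist/.json extension filter is an assumption based on common CNI convention rather than the kubelet's actual implementation.

// cnicheck.go: report whether any CNI config candidate exists in the
// directory named by the kubelet error above.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	dir := "/etc/kubernetes/cni/net.d" // path from the kubelet error
	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Printf("cannot read %s: %v\n", dir, err)
		return
	}
	found := 0
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			fmt.Println("CNI config candidate:", filepath.Join(dir, e.Name()))
			found++
		}
	}
	if found == 0 {
		fmt.Println("no CNI configuration file found; NetworkReady stays false")
	}
}

On a node in the state logged here the check finds no candidates; once the network provider writes its configuration, the runtime can flip NetworkReady to true and the "no sandbox" pods above become syncable.
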
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.887165 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.887212 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.887223 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.887244 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.887257 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:47Z","lastTransitionTime":"2025-10-11T04:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.990230 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.990280 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.990292 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.990311 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:47 crc kubenswrapper[4651]: I1011 04:52:47.990323 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:47Z","lastTransitionTime":"2025-10-11T04:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.092631 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.092662 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.092672 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.092687 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.092696 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:48Z","lastTransitionTime":"2025-10-11T04:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.194967 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.195018 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.195031 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.195050 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.195065 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:48Z","lastTransitionTime":"2025-10-11T04:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.297400 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.297440 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.297449 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.297463 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.297473 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:48Z","lastTransitionTime":"2025-10-11T04:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.399155 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.399228 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.399255 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.399284 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.399309 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:48Z","lastTransitionTime":"2025-10-11T04:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.501738 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.501770 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.501778 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.501791 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.501799 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:48Z","lastTransitionTime":"2025-10-11T04:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.604389 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.604428 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.604437 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.604451 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.604461 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:48Z","lastTransitionTime":"2025-10-11T04:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.706985 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.707054 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.707074 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.707099 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.707117 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:48Z","lastTransitionTime":"2025-10-11T04:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.809709 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.809777 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.809803 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.809863 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.809889 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:48Z","lastTransitionTime":"2025-10-11T04:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.868542 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:52:48 crc kubenswrapper[4651]: E1011 04:52:48.869225 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.912710 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.912775 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.912791 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.912816 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:48 crc kubenswrapper[4651]: I1011 04:52:48.912854 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:48Z","lastTransitionTime":"2025-10-11T04:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.015752 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.015809 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.015837 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.015856 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.015869 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:49Z","lastTransitionTime":"2025-10-11T04:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.119209 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.119286 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.119311 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.119340 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.119361 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:49Z","lastTransitionTime":"2025-10-11T04:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.222323 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.222390 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.222407 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.222433 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.222452 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:49Z","lastTransitionTime":"2025-10-11T04:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.326808 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.326909 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.326928 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.326953 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.326969 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:49Z","lastTransitionTime":"2025-10-11T04:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.430116 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.430175 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.430192 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.430217 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.430237 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:49Z","lastTransitionTime":"2025-10-11T04:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.533685 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.533745 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.533762 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.533788 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.533807 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:49Z","lastTransitionTime":"2025-10-11T04:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.637079 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.637144 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.637164 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.637191 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.637209 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:49Z","lastTransitionTime":"2025-10-11T04:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.739981 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.740046 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.740066 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.740090 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.740107 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:49Z","lastTransitionTime":"2025-10-11T04:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.842763 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.842858 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.842880 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.842907 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.842926 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:49Z","lastTransitionTime":"2025-10-11T04:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.868772 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.868885 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:52:49 crc kubenswrapper[4651]: E1011 04:52:49.869143 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.869187 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 11 04:52:49 crc kubenswrapper[4651]: E1011 04:52:49.870313 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 11 04:52:49 crc kubenswrapper[4651]: E1011 04:52:49.870000 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30"
Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.945295 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.945579 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.946073 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.946227 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.946349 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:49Z","lastTransitionTime":"2025-10-11T04:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
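Every sync failure above has the same root cause: the kubelet cannot create pod sandboxes because no CNI network config exists yet in /etc/kubernetes/cni/net.d/; that directory is populated by the cluster's network provider (the Multus and OVN-Kubernetes pods that appear later in this log) once it comes up. Purely as a hedged illustration, the sketch below writes the kind of minimal CNI conflist that would satisfy this check; the network name, plugin choice, and subnet are hypothetical placeholders, not what OVN-Kubernetes actually installs.

    # Hypothetical sketch only: write a minimal CNI conflist of the kind the
    # kubelet is polling for in /etc/kubernetes/cni/net.d/. The name, plugin,
    # and subnet are placeholders; the real file is installed by the cluster's
    # network provider, not by hand.
    import json

    conflist = {
        "cniVersion": "0.4.0",
        "name": "example-net",  # placeholder network name
        "plugins": [
            {
                "type": "bridge",  # stand-in plugin; OVN-Kubernetes ships its own
                "bridge": "cni0",
                "ipam": {"type": "host-local", "subnet": "10.88.0.0/16"},
            }
        ],
    }

    with open("/etc/kubernetes/cni/net.d/10-example.conflist", "w") as f:
        json.dump(conflist, f, indent=2)

As soon as a valid config appears in that directory, the runtime reports NetworkReady=true and the NodeNotReady churn below stops.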
Has your network provider started?"} Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.962869 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=69.962764625 podStartE2EDuration="1m9.962764625s" podCreationTimestamp="2025-10-11 04:51:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:52:49.961995175 +0000 UTC m=+90.858228011" watchObservedRunningTime="2025-10-11 04:52:49.962764625 +0000 UTC m=+90.858997501" Oct 11 04:52:49 crc kubenswrapper[4651]: I1011 04:52:49.980578 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=69.980560575 podStartE2EDuration="1m9.980560575s" podCreationTimestamp="2025-10-11 04:51:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:52:49.980156485 +0000 UTC m=+90.876389321" watchObservedRunningTime="2025-10-11 04:52:49.980560575 +0000 UTC m=+90.876793381" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.011838 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=34.011797716 podStartE2EDuration="34.011797716s" podCreationTimestamp="2025-10-11 04:52:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:52:49.997167606 +0000 UTC m=+90.893400422" watchObservedRunningTime="2025-10-11 04:52:50.011797716 +0000 UTC m=+90.908030532" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.046137 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podStartSLOduration=71.046116375 podStartE2EDuration="1m11.046116375s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:52:50.027562895 +0000 UTC m=+90.923795721" watchObservedRunningTime="2025-10-11 04:52:50.046116375 +0000 UTC m=+90.942349171" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.048626 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.048791 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.048898 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.048991 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.049073 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:50Z","lastTransitionTime":"2025-10-11T04:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.058210 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-wz4hw" podStartSLOduration=71.05818929 podStartE2EDuration="1m11.05818929s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:52:50.046538215 +0000 UTC m=+90.942771011" watchObservedRunningTime="2025-10-11 04:52:50.05818929 +0000 UTC m=+90.954422106" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.069343 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-phsgk" podStartSLOduration=71.069323992 podStartE2EDuration="1m11.069323992s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:52:50.068568863 +0000 UTC m=+90.964801669" watchObservedRunningTime="2025-10-11 04:52:50.069323992 +0000 UTC m=+90.965556798" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.069895 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=14.069884206 podStartE2EDuration="14.069884206s" podCreationTimestamp="2025-10-11 04:52:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:52:50.058904758 +0000 UTC m=+90.955137564" watchObservedRunningTime="2025-10-11 04:52:50.069884206 +0000 UTC m=+90.966117012" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.147210 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-kwhmr" podStartSLOduration=71.147191973 podStartE2EDuration="1m11.147191973s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:52:50.122863117 +0000 UTC m=+91.019095923" watchObservedRunningTime="2025-10-11 04:52:50.147191973 +0000 UTC m=+91.043424769" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.151310 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.151336 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.151344 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.151355 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.151364 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:50Z","lastTransitionTime":"2025-10-11T04:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.178862 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-pgwvb" podStartSLOduration=71.178845774 podStartE2EDuration="1m11.178845774s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:52:50.177973452 +0000 UTC m=+91.074206268" watchObservedRunningTime="2025-10-11 04:52:50.178845774 +0000 UTC m=+91.075078570" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.196539 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qsgwc" podStartSLOduration=70.196525361 podStartE2EDuration="1m10.196525361s" podCreationTimestamp="2025-10-11 04:51:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:52:50.19606479 +0000 UTC m=+91.092297596" watchObservedRunningTime="2025-10-11 04:52:50.196525361 +0000 UTC m=+91.092758157" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.254127 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.254361 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.254447 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.254535 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.254605 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:50Z","lastTransitionTime":"2025-10-11T04:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.357636 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.357710 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.357722 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.357741 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.357755 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:50Z","lastTransitionTime":"2025-10-11T04:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.460555 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.460591 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.460602 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.460620 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.460631 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:50Z","lastTransitionTime":"2025-10-11T04:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.563100 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.563185 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.563197 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.563213 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.563223 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:50Z","lastTransitionTime":"2025-10-11T04:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.666958 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.667007 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.667016 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.667034 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.667043 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:50Z","lastTransitionTime":"2025-10-11T04:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.771082 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.771150 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.771169 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.771198 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.771215 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:50Z","lastTransitionTime":"2025-10-11T04:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.868976 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:52:50 crc kubenswrapper[4651]: E1011 04:52:50.870454 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.873609 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.873655 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.873669 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.873689 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.873704 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:50Z","lastTransitionTime":"2025-10-11T04:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.979499 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.979557 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.979574 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.979599 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:50 crc kubenswrapper[4651]: I1011 04:52:50.979623 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:50Z","lastTransitionTime":"2025-10-11T04:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.082477 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.082525 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.082541 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.082564 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.082581 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:51Z","lastTransitionTime":"2025-10-11T04:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.185787 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.185854 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.185870 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.185891 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.185905 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:51Z","lastTransitionTime":"2025-10-11T04:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.289356 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.289426 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.289453 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.289483 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.289505 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:51Z","lastTransitionTime":"2025-10-11T04:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.392677 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.392749 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.392774 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.392803 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.392861 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:51Z","lastTransitionTime":"2025-10-11T04:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.497074 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.497128 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.497145 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.497168 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.497187 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:51Z","lastTransitionTime":"2025-10-11T04:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.600649 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.600714 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.600737 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.600763 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.600783 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:51Z","lastTransitionTime":"2025-10-11T04:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.704190 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.704247 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.704265 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.704290 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.704308 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:51Z","lastTransitionTime":"2025-10-11T04:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.806912 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.806969 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.806987 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.807011 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.807030 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:51Z","lastTransitionTime":"2025-10-11T04:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.869101 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.869196 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:52:51 crc kubenswrapper[4651]: E1011 04:52:51.869302 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.869196 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:52:51 crc kubenswrapper[4651]: E1011 04:52:51.869485 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:52:51 crc kubenswrapper[4651]: E1011 04:52:51.869534 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.909662 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.909761 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.909779 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.909803 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:51 crc kubenswrapper[4651]: I1011 04:52:51.909843 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:51Z","lastTransitionTime":"2025-10-11T04:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.012969 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.013034 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.013056 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.013079 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.013099 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:52Z","lastTransitionTime":"2025-10-11T04:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.116455 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.116505 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.116517 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.116540 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.116557 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:52Z","lastTransitionTime":"2025-10-11T04:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.218871 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.218955 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.218996 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.219018 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.219032 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:52Z","lastTransitionTime":"2025-10-11T04:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.320805 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.320845 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.320853 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.320866 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.320875 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:52Z","lastTransitionTime":"2025-10-11T04:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.422991 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.423033 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.423042 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.423057 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.423068 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:52Z","lastTransitionTime":"2025-10-11T04:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.525178 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.525222 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.525231 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.525244 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.525254 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:52Z","lastTransitionTime":"2025-10-11T04:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.627673 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.627710 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.627721 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.627736 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.627746 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:52Z","lastTransitionTime":"2025-10-11T04:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.729915 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.729953 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.729962 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.729977 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.729987 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:52Z","lastTransitionTime":"2025-10-11T04:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.833039 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.833094 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.833111 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.833134 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.833151 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:52Z","lastTransitionTime":"2025-10-11T04:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.868552 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:52:52 crc kubenswrapper[4651]: E1011 04:52:52.868660 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.936416 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.936923 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.937020 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.937170 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:52 crc kubenswrapper[4651]: I1011 04:52:52.937322 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:52Z","lastTransitionTime":"2025-10-11T04:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.041180 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.041620 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.041704 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.041783 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.041878 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:53Z","lastTransitionTime":"2025-10-11T04:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.144563 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.144599 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.144609 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.144647 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.144661 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:53Z","lastTransitionTime":"2025-10-11T04:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.248278 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.248329 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.248340 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.248357 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.248368 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:53Z","lastTransitionTime":"2025-10-11T04:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.351080 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.351152 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.351170 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.351196 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.351213 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:53Z","lastTransitionTime":"2025-10-11T04:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.454414 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.454488 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.454509 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.454533 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.454552 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:53Z","lastTransitionTime":"2025-10-11T04:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.557055 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.557172 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.557189 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.557213 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.557229 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:53Z","lastTransitionTime":"2025-10-11T04:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.660410 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.660466 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.660476 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.660495 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.660506 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:53Z","lastTransitionTime":"2025-10-11T04:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.763735 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.763800 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.763844 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.763875 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.763898 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:53Z","lastTransitionTime":"2025-10-11T04:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.866529 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.866590 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.866607 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.866629 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.866647 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:53Z","lastTransitionTime":"2025-10-11T04:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.868905 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.868962 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.869016 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:52:53 crc kubenswrapper[4651]: E1011 04:52:53.869129 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:52:53 crc kubenswrapper[4651]: E1011 04:52:53.869259 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:52:53 crc kubenswrapper[4651]: E1011 04:52:53.869555 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.969180 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.969233 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.969242 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.969261 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:53 crc kubenswrapper[4651]: I1011 04:52:53.969273 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:53Z","lastTransitionTime":"2025-10-11T04:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.072913 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.072969 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.072981 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.073000 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.073012 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:54Z","lastTransitionTime":"2025-10-11T04:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.175895 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.175947 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.175964 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.175987 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.176043 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:54Z","lastTransitionTime":"2025-10-11T04:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.280398 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.280473 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.280491 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.280518 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.280538 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:54Z","lastTransitionTime":"2025-10-11T04:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.383356 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.383407 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.383418 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.383437 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.383450 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:54Z","lastTransitionTime":"2025-10-11T04:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.486200 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.486270 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.486307 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.486336 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.486356 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:54Z","lastTransitionTime":"2025-10-11T04:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.589306 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.589391 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.589411 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.589442 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.589461 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:54Z","lastTransitionTime":"2025-10-11T04:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.692335 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.692385 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.692393 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.692408 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.692418 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:54Z","lastTransitionTime":"2025-10-11T04:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.795111 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.795160 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.795170 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.795184 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.795193 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:54Z","lastTransitionTime":"2025-10-11T04:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.868662 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:52:54 crc kubenswrapper[4651]: E1011 04:52:54.868796 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.898121 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.898149 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.898161 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.898174 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:54 crc kubenswrapper[4651]: I1011 04:52:54.898187 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:54Z","lastTransitionTime":"2025-10-11T04:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:54.999957 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.000020 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.000035 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.000062 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.000077 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:55Z","lastTransitionTime":"2025-10-11T04:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.102565 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.102615 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.102627 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.102646 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.102659 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:55Z","lastTransitionTime":"2025-10-11T04:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.205270 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.205313 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.205323 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.205339 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.205349 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:55Z","lastTransitionTime":"2025-10-11T04:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.307466 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.307499 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.307510 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.307556 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.307569 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:55Z","lastTransitionTime":"2025-10-11T04:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.410994 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.411067 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.411095 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.411123 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.411145 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:55Z","lastTransitionTime":"2025-10-11T04:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.516753 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.516810 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.516846 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.516864 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.516883 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:55Z","lastTransitionTime":"2025-10-11T04:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.618746 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.619106 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.619210 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.619298 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.619427 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:55Z","lastTransitionTime":"2025-10-11T04:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.721656 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.721696 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.721707 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.721724 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.721735 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:55Z","lastTransitionTime":"2025-10-11T04:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.824300 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.824346 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.824355 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.824370 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.824381 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:55Z","lastTransitionTime":"2025-10-11T04:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.869270 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.869319 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.869317 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:52:55 crc kubenswrapper[4651]: E1011 04:52:55.869657 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:52:55 crc kubenswrapper[4651]: E1011 04:52:55.869851 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:52:55 crc kubenswrapper[4651]: E1011 04:52:55.869954 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.889209 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.927371 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.927410 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.927418 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.927439 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:55 crc kubenswrapper[4651]: I1011 04:52:55.927449 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:55Z","lastTransitionTime":"2025-10-11T04:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.030095 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.030134 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.030145 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.030162 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.030175 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:56Z","lastTransitionTime":"2025-10-11T04:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.132809 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.132862 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.132871 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.132886 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.132896 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:56Z","lastTransitionTime":"2025-10-11T04:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.236184 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.236244 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.236265 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.236295 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.236318 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:56Z","lastTransitionTime":"2025-10-11T04:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.338803 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.338863 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.338875 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.338891 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.338903 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:56Z","lastTransitionTime":"2025-10-11T04:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.442781 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.442841 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.442853 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.442869 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.442880 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:56Z","lastTransitionTime":"2025-10-11T04:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.545942 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.545982 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.545994 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.546008 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.546018 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:56Z","lastTransitionTime":"2025-10-11T04:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.649212 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.649271 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.649290 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.649318 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.649335 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:56Z","lastTransitionTime":"2025-10-11T04:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.751611 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.751683 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.751707 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.751738 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.751770 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:56Z","lastTransitionTime":"2025-10-11T04:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.860135 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.860164 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.860171 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.860183 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.860210 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:56Z","lastTransitionTime":"2025-10-11T04:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.868465 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:52:56 crc kubenswrapper[4651]: E1011 04:52:56.868806 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.870791 4651 scope.go:117] "RemoveContainer" containerID="bedbf9638749a3e9eca8260d325aaa6064bbb0ce8ea6e1cd6042e0039175c968" Oct 11 04:52:56 crc kubenswrapper[4651]: E1011 04:52:56.871097 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-6zt9s_openshift-ovn-kubernetes(28e01c08-a461-4f44-a49c-4bf92fd3a2ce)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.962767 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.962969 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.962997 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.963027 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:56 crc kubenswrapper[4651]: I1011 04:52:56.963050 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:56Z","lastTransitionTime":"2025-10-11T04:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.066132 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.066197 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.066206 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.066239 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.066250 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:57Z","lastTransitionTime":"2025-10-11T04:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.169319 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.169376 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.169392 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.169434 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.169450 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:57Z","lastTransitionTime":"2025-10-11T04:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.272584 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.272636 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.272654 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.272676 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.272693 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:57Z","lastTransitionTime":"2025-10-11T04:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.375620 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.375730 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.375748 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.375774 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.375796 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:57Z","lastTransitionTime":"2025-10-11T04:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.399197 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.399244 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.399256 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.399329 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.399348 4651 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T04:52:57Z","lastTransitionTime":"2025-10-11T04:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.458986 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-zgdjq"] Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.459844 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zgdjq" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.462152 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.462510 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.462895 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.468814 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.512140 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=2.512113538 podStartE2EDuration="2.512113538s" podCreationTimestamp="2025-10-11 04:52:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:52:57.51100337 +0000 UTC m=+98.407236266" watchObservedRunningTime="2025-10-11 04:52:57.512113538 +0000 UTC m=+98.408346404" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.519630 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/704296b4-8ba2-489f-bee0-411d4134ae63-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-zgdjq\" (UID: \"704296b4-8ba2-489f-bee0-411d4134ae63\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zgdjq" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.519683 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/704296b4-8ba2-489f-bee0-411d4134ae63-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-zgdjq\" (UID: \"704296b4-8ba2-489f-bee0-411d4134ae63\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zgdjq" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.519710 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a551fed8-58fb-48ae-88af-8dc0cb48fc30-metrics-certs\") pod \"network-metrics-daemon-tgvv8\" (UID: \"a551fed8-58fb-48ae-88af-8dc0cb48fc30\") " pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.519736 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/704296b4-8ba2-489f-bee0-411d4134ae63-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-zgdjq\" (UID: \"704296b4-8ba2-489f-bee0-411d4134ae63\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zgdjq" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.519753 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/704296b4-8ba2-489f-bee0-411d4134ae63-service-ca\") pod \"cluster-version-operator-5c965bbfc6-zgdjq\" (UID: \"704296b4-8ba2-489f-bee0-411d4134ae63\") " 
pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zgdjq" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.519768 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/704296b4-8ba2-489f-bee0-411d4134ae63-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-zgdjq\" (UID: \"704296b4-8ba2-489f-bee0-411d4134ae63\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zgdjq" Oct 11 04:52:57 crc kubenswrapper[4651]: E1011 04:52:57.519905 4651 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 11 04:52:57 crc kubenswrapper[4651]: E1011 04:52:57.519944 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a551fed8-58fb-48ae-88af-8dc0cb48fc30-metrics-certs podName:a551fed8-58fb-48ae-88af-8dc0cb48fc30 nodeName:}" failed. No retries permitted until 2025-10-11 04:54:01.519932586 +0000 UTC m=+162.416165382 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a551fed8-58fb-48ae-88af-8dc0cb48fc30-metrics-certs") pod "network-metrics-daemon-tgvv8" (UID: "a551fed8-58fb-48ae-88af-8dc0cb48fc30") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.620653 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/704296b4-8ba2-489f-bee0-411d4134ae63-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-zgdjq\" (UID: \"704296b4-8ba2-489f-bee0-411d4134ae63\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zgdjq" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.620780 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/704296b4-8ba2-489f-bee0-411d4134ae63-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-zgdjq\" (UID: \"704296b4-8ba2-489f-bee0-411d4134ae63\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zgdjq" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.620929 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/704296b4-8ba2-489f-bee0-411d4134ae63-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-zgdjq\" (UID: \"704296b4-8ba2-489f-bee0-411d4134ae63\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zgdjq" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.620944 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/704296b4-8ba2-489f-bee0-411d4134ae63-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-zgdjq\" (UID: \"704296b4-8ba2-489f-bee0-411d4134ae63\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zgdjq" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.621018 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/704296b4-8ba2-489f-bee0-411d4134ae63-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-zgdjq\" (UID: \"704296b4-8ba2-489f-bee0-411d4134ae63\") " 
pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zgdjq" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.621053 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/704296b4-8ba2-489f-bee0-411d4134ae63-service-ca\") pod \"cluster-version-operator-5c965bbfc6-zgdjq\" (UID: \"704296b4-8ba2-489f-bee0-411d4134ae63\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zgdjq" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.621114 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/704296b4-8ba2-489f-bee0-411d4134ae63-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-zgdjq\" (UID: \"704296b4-8ba2-489f-bee0-411d4134ae63\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zgdjq" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.622677 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/704296b4-8ba2-489f-bee0-411d4134ae63-service-ca\") pod \"cluster-version-operator-5c965bbfc6-zgdjq\" (UID: \"704296b4-8ba2-489f-bee0-411d4134ae63\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zgdjq" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.628928 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/704296b4-8ba2-489f-bee0-411d4134ae63-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-zgdjq\" (UID: \"704296b4-8ba2-489f-bee0-411d4134ae63\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zgdjq" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.648405 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/704296b4-8ba2-489f-bee0-411d4134ae63-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-zgdjq\" (UID: \"704296b4-8ba2-489f-bee0-411d4134ae63\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zgdjq" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.784571 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zgdjq" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.868624 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.868746 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:52:57 crc kubenswrapper[4651]: E1011 04:52:57.868788 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:52:57 crc kubenswrapper[4651]: I1011 04:52:57.868902 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:52:57 crc kubenswrapper[4651]: E1011 04:52:57.869092 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:52:57 crc kubenswrapper[4651]: E1011 04:52:57.869291 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:52:58 crc kubenswrapper[4651]: I1011 04:52:58.415003 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zgdjq" event={"ID":"704296b4-8ba2-489f-bee0-411d4134ae63","Type":"ContainerStarted","Data":"48d11a08dad61ba8e9cc3a146e5657d98af117e3435ad204989ae7f4d872efb6"} Oct 11 04:52:58 crc kubenswrapper[4651]: I1011 04:52:58.415417 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zgdjq" event={"ID":"704296b4-8ba2-489f-bee0-411d4134ae63","Type":"ContainerStarted","Data":"4bbf281c60689ae3792ef8ee51dc66f87d34f38b1d87c79e37da0d3be9d36e39"} Oct 11 04:52:58 crc kubenswrapper[4651]: I1011 04:52:58.433911 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-zgdjq" podStartSLOduration=79.433884837 podStartE2EDuration="1m19.433884837s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:52:58.433064426 +0000 UTC m=+99.329297292" watchObservedRunningTime="2025-10-11 04:52:58.433884837 +0000 UTC m=+99.330117663" Oct 11 04:52:58 crc kubenswrapper[4651]: I1011 04:52:58.868884 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:52:58 crc kubenswrapper[4651]: E1011 04:52:58.869091 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:52:59 crc kubenswrapper[4651]: I1011 04:52:59.868939 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:52:59 crc kubenswrapper[4651]: I1011 04:52:59.869012 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:52:59 crc kubenswrapper[4651]: I1011 04:52:59.868968 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:52:59 crc kubenswrapper[4651]: E1011 04:52:59.870842 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:52:59 crc kubenswrapper[4651]: E1011 04:52:59.870910 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:52:59 crc kubenswrapper[4651]: E1011 04:52:59.871014 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:53:00 crc kubenswrapper[4651]: I1011 04:53:00.903023 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:53:00 crc kubenswrapper[4651]: E1011 04:53:00.903135 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:53:01 crc kubenswrapper[4651]: I1011 04:53:01.870170 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:53:01 crc kubenswrapper[4651]: E1011 04:53:01.870282 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:53:01 crc kubenswrapper[4651]: I1011 04:53:01.870462 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:53:01 crc kubenswrapper[4651]: E1011 04:53:01.870511 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:53:01 crc kubenswrapper[4651]: I1011 04:53:01.870672 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:53:01 crc kubenswrapper[4651]: E1011 04:53:01.870715 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:53:02 crc kubenswrapper[4651]: I1011 04:53:02.868743 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:53:02 crc kubenswrapper[4651]: E1011 04:53:02.868960 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:53:03 crc kubenswrapper[4651]: I1011 04:53:03.868472 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:53:03 crc kubenswrapper[4651]: I1011 04:53:03.868542 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:53:03 crc kubenswrapper[4651]: E1011 04:53:03.868612 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:53:03 crc kubenswrapper[4651]: E1011 04:53:03.868702 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:53:03 crc kubenswrapper[4651]: I1011 04:53:03.868745 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:53:03 crc kubenswrapper[4651]: E1011 04:53:03.868951 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:53:04 crc kubenswrapper[4651]: I1011 04:53:04.869253 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:53:04 crc kubenswrapper[4651]: E1011 04:53:04.869465 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:53:05 crc kubenswrapper[4651]: I1011 04:53:05.868905 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:53:05 crc kubenswrapper[4651]: I1011 04:53:05.869040 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:53:05 crc kubenswrapper[4651]: E1011 04:53:05.869081 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:53:05 crc kubenswrapper[4651]: I1011 04:53:05.869194 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:53:05 crc kubenswrapper[4651]: E1011 04:53:05.869564 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:53:05 crc kubenswrapper[4651]: E1011 04:53:05.869661 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:53:06 crc kubenswrapper[4651]: I1011 04:53:06.869322 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:53:06 crc kubenswrapper[4651]: E1011 04:53:06.870075 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:53:07 crc kubenswrapper[4651]: I1011 04:53:07.869460 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:53:07 crc kubenswrapper[4651]: I1011 04:53:07.869554 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:53:07 crc kubenswrapper[4651]: I1011 04:53:07.869481 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:53:07 crc kubenswrapper[4651]: E1011 04:53:07.869727 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:53:07 crc kubenswrapper[4651]: E1011 04:53:07.869877 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:53:07 crc kubenswrapper[4651]: E1011 04:53:07.870000 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:53:08 crc kubenswrapper[4651]: I1011 04:53:08.869482 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:53:08 crc kubenswrapper[4651]: E1011 04:53:08.869735 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:53:09 crc kubenswrapper[4651]: I1011 04:53:09.869309 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:53:09 crc kubenswrapper[4651]: I1011 04:53:09.869362 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:53:09 crc kubenswrapper[4651]: E1011 04:53:09.871229 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:53:09 crc kubenswrapper[4651]: I1011 04:53:09.871273 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:53:09 crc kubenswrapper[4651]: E1011 04:53:09.871968 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:53:09 crc kubenswrapper[4651]: E1011 04:53:09.871815 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:53:09 crc kubenswrapper[4651]: I1011 04:53:09.872644 4651 scope.go:117] "RemoveContainer" containerID="bedbf9638749a3e9eca8260d325aaa6064bbb0ce8ea6e1cd6042e0039175c968" Oct 11 04:53:09 crc kubenswrapper[4651]: E1011 04:53:09.872951 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-6zt9s_openshift-ovn-kubernetes(28e01c08-a461-4f44-a49c-4bf92fd3a2ce)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" Oct 11 04:53:10 crc kubenswrapper[4651]: I1011 04:53:10.869270 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:53:10 crc kubenswrapper[4651]: E1011 04:53:10.869455 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:53:11 crc kubenswrapper[4651]: I1011 04:53:11.869303 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:53:11 crc kubenswrapper[4651]: E1011 04:53:11.869449 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:53:11 crc kubenswrapper[4651]: I1011 04:53:11.869700 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:53:11 crc kubenswrapper[4651]: I1011 04:53:11.869973 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:53:11 crc kubenswrapper[4651]: E1011 04:53:11.869983 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:53:11 crc kubenswrapper[4651]: E1011 04:53:11.870120 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:53:12 crc kubenswrapper[4651]: I1011 04:53:12.869501 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:53:12 crc kubenswrapper[4651]: E1011 04:53:12.869780 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:53:13 crc kubenswrapper[4651]: I1011 04:53:13.869174 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:53:13 crc kubenswrapper[4651]: I1011 04:53:13.869279 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:53:13 crc kubenswrapper[4651]: I1011 04:53:13.869354 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:53:13 crc kubenswrapper[4651]: E1011 04:53:13.869582 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:53:13 crc kubenswrapper[4651]: E1011 04:53:13.869674 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:53:13 crc kubenswrapper[4651]: E1011 04:53:13.869815 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:53:14 crc kubenswrapper[4651]: I1011 04:53:14.473460 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wz4hw_fbfdd781-994b-49b4-9c8e-edc0ea4145d1/kube-multus/1.log" Oct 11 04:53:14 crc kubenswrapper[4651]: I1011 04:53:14.474365 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wz4hw_fbfdd781-994b-49b4-9c8e-edc0ea4145d1/kube-multus/0.log" Oct 11 04:53:14 crc kubenswrapper[4651]: I1011 04:53:14.474432 4651 generic.go:334] "Generic (PLEG): container finished" podID="fbfdd781-994b-49b4-9c8e-edc0ea4145d1" containerID="453b8d5da6858078639895d4d19bb1783eefeeb7c558eec6984f6ebdadd5d8fc" exitCode=1 Oct 11 04:53:14 crc kubenswrapper[4651]: I1011 04:53:14.474482 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wz4hw" event={"ID":"fbfdd781-994b-49b4-9c8e-edc0ea4145d1","Type":"ContainerDied","Data":"453b8d5da6858078639895d4d19bb1783eefeeb7c558eec6984f6ebdadd5d8fc"} Oct 11 04:53:14 crc kubenswrapper[4651]: I1011 04:53:14.474535 4651 scope.go:117] "RemoveContainer" containerID="2d37b5524c49533e7dad8b61f0e20b7d1e37b7ae1afc5b9bfe9146a0744202a2" Oct 11 04:53:14 crc kubenswrapper[4651]: I1011 04:53:14.475597 4651 scope.go:117] "RemoveContainer" containerID="453b8d5da6858078639895d4d19bb1783eefeeb7c558eec6984f6ebdadd5d8fc" Oct 11 04:53:14 crc kubenswrapper[4651]: E1011 04:53:14.476257 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-wz4hw_openshift-multus(fbfdd781-994b-49b4-9c8e-edc0ea4145d1)\"" pod="openshift-multus/multus-wz4hw" podUID="fbfdd781-994b-49b4-9c8e-edc0ea4145d1" Oct 11 04:53:14 crc kubenswrapper[4651]: I1011 04:53:14.869399 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:53:14 crc kubenswrapper[4651]: E1011 04:53:14.869961 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:53:15 crc kubenswrapper[4651]: I1011 04:53:15.479557 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wz4hw_fbfdd781-994b-49b4-9c8e-edc0ea4145d1/kube-multus/1.log" Oct 11 04:53:15 crc kubenswrapper[4651]: I1011 04:53:15.868689 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:53:15 crc kubenswrapper[4651]: I1011 04:53:15.868920 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:53:15 crc kubenswrapper[4651]: I1011 04:53:15.868689 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:53:15 crc kubenswrapper[4651]: E1011 04:53:15.869065 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:53:15 crc kubenswrapper[4651]: E1011 04:53:15.869253 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:53:15 crc kubenswrapper[4651]: E1011 04:53:15.869404 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:53:16 crc kubenswrapper[4651]: I1011 04:53:16.868474 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:53:16 crc kubenswrapper[4651]: E1011 04:53:16.868947 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:53:17 crc kubenswrapper[4651]: I1011 04:53:17.869566 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:53:17 crc kubenswrapper[4651]: I1011 04:53:17.869637 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:53:17 crc kubenswrapper[4651]: I1011 04:53:17.869606 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:53:17 crc kubenswrapper[4651]: E1011 04:53:17.869849 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:53:17 crc kubenswrapper[4651]: E1011 04:53:17.870006 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:53:17 crc kubenswrapper[4651]: E1011 04:53:17.870243 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:53:18 crc kubenswrapper[4651]: I1011 04:53:18.869226 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:53:18 crc kubenswrapper[4651]: E1011 04:53:18.869429 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:53:19 crc kubenswrapper[4651]: E1011 04:53:19.802167 4651 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Oct 11 04:53:19 crc kubenswrapper[4651]: I1011 04:53:19.869518 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:53:19 crc kubenswrapper[4651]: I1011 04:53:19.869638 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:53:19 crc kubenswrapper[4651]: I1011 04:53:19.871410 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:53:19 crc kubenswrapper[4651]: E1011 04:53:19.871395 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:53:19 crc kubenswrapper[4651]: E1011 04:53:19.871943 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:53:19 crc kubenswrapper[4651]: E1011 04:53:19.872059 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:53:19 crc kubenswrapper[4651]: E1011 04:53:19.984232 4651 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 11 04:53:20 crc kubenswrapper[4651]: I1011 04:53:20.868752 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:53:20 crc kubenswrapper[4651]: E1011 04:53:20.868999 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:53:21 crc kubenswrapper[4651]: I1011 04:53:21.868504 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:53:21 crc kubenswrapper[4651]: E1011 04:53:21.868641 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:53:21 crc kubenswrapper[4651]: I1011 04:53:21.868878 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:53:21 crc kubenswrapper[4651]: E1011 04:53:21.868946 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:53:21 crc kubenswrapper[4651]: I1011 04:53:21.869182 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:53:21 crc kubenswrapper[4651]: E1011 04:53:21.869283 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:53:22 crc kubenswrapper[4651]: I1011 04:53:22.869491 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:53:22 crc kubenswrapper[4651]: E1011 04:53:22.869750 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:53:22 crc kubenswrapper[4651]: I1011 04:53:22.871086 4651 scope.go:117] "RemoveContainer" containerID="bedbf9638749a3e9eca8260d325aaa6064bbb0ce8ea6e1cd6042e0039175c968" Oct 11 04:53:23 crc kubenswrapper[4651]: I1011 04:53:23.512753 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6zt9s_28e01c08-a461-4f44-a49c-4bf92fd3a2ce/ovnkube-controller/3.log" Oct 11 04:53:23 crc kubenswrapper[4651]: I1011 04:53:23.516094 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" event={"ID":"28e01c08-a461-4f44-a49c-4bf92fd3a2ce","Type":"ContainerStarted","Data":"ff59b524a1c2589496d6c2c71a6ce2ad3b7b309b0e6e25f0869d10c12a5a877e"} Oct 11 04:53:23 crc kubenswrapper[4651]: I1011 04:53:23.516712 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" Oct 11 04:53:23 crc kubenswrapper[4651]: I1011 04:53:23.869119 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:53:23 crc kubenswrapper[4651]: I1011 04:53:23.869284 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:53:23 crc kubenswrapper[4651]: E1011 04:53:23.869395 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:53:23 crc kubenswrapper[4651]: I1011 04:53:23.869487 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:53:23 crc kubenswrapper[4651]: E1011 04:53:23.869669 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:53:23 crc kubenswrapper[4651]: E1011 04:53:23.870087 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:53:23 crc kubenswrapper[4651]: I1011 04:53:23.902013 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" podStartSLOduration=104.901991119 podStartE2EDuration="1m44.901991119s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:23.556672399 +0000 UTC m=+124.452905245" watchObservedRunningTime="2025-10-11 04:53:23.901991119 +0000 UTC m=+124.798223925" Oct 11 04:53:23 crc kubenswrapper[4651]: I1011 04:53:23.903292 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-tgvv8"] Oct 11 04:53:24 crc kubenswrapper[4651]: I1011 04:53:24.519551 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:53:24 crc kubenswrapper[4651]: E1011 04:53:24.520037 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:53:24 crc kubenswrapper[4651]: I1011 04:53:24.869450 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:53:24 crc kubenswrapper[4651]: E1011 04:53:24.869771 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:53:24 crc kubenswrapper[4651]: E1011 04:53:24.985554 4651 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 11 04:53:25 crc kubenswrapper[4651]: I1011 04:53:25.868456 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:53:25 crc kubenswrapper[4651]: I1011 04:53:25.868481 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:53:25 crc kubenswrapper[4651]: E1011 04:53:25.868641 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:53:25 crc kubenswrapper[4651]: E1011 04:53:25.868766 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:53:26 crc kubenswrapper[4651]: I1011 04:53:26.868601 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:53:26 crc kubenswrapper[4651]: I1011 04:53:26.868601 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:53:26 crc kubenswrapper[4651]: E1011 04:53:26.869543 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:53:26 crc kubenswrapper[4651]: I1011 04:53:26.869559 4651 scope.go:117] "RemoveContainer" containerID="453b8d5da6858078639895d4d19bb1783eefeeb7c558eec6984f6ebdadd5d8fc" Oct 11 04:53:26 crc kubenswrapper[4651]: E1011 04:53:26.869810 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:53:27 crc kubenswrapper[4651]: I1011 04:53:27.530988 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wz4hw_fbfdd781-994b-49b4-9c8e-edc0ea4145d1/kube-multus/1.log" Oct 11 04:53:27 crc kubenswrapper[4651]: I1011 04:53:27.531388 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wz4hw" event={"ID":"fbfdd781-994b-49b4-9c8e-edc0ea4145d1","Type":"ContainerStarted","Data":"593f6c9a7505c4ee7c5c917e6b485fb997517f68d952455303c896adae9f2391"} Oct 11 04:53:27 crc kubenswrapper[4651]: I1011 04:53:27.868963 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:53:27 crc kubenswrapper[4651]: I1011 04:53:27.869007 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:53:27 crc kubenswrapper[4651]: E1011 04:53:27.869184 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:53:27 crc kubenswrapper[4651]: E1011 04:53:27.869303 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:53:28 crc kubenswrapper[4651]: I1011 04:53:28.868556 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:53:28 crc kubenswrapper[4651]: E1011 04:53:28.868748 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-tgvv8" podUID="a551fed8-58fb-48ae-88af-8dc0cb48fc30" Oct 11 04:53:28 crc kubenswrapper[4651]: I1011 04:53:28.868593 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:53:28 crc kubenswrapper[4651]: E1011 04:53:28.869052 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 04:53:29 crc kubenswrapper[4651]: I1011 04:53:29.869262 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:53:29 crc kubenswrapper[4651]: I1011 04:53:29.869321 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:53:29 crc kubenswrapper[4651]: E1011 04:53:29.871920 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 04:53:29 crc kubenswrapper[4651]: E1011 04:53:29.872216 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 04:53:30 crc kubenswrapper[4651]: I1011 04:53:30.869219 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:53:30 crc kubenswrapper[4651]: I1011 04:53:30.869748 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:53:30 crc kubenswrapper[4651]: I1011 04:53:30.873206 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Oct 11 04:53:30 crc kubenswrapper[4651]: I1011 04:53:30.873438 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Oct 11 04:53:30 crc kubenswrapper[4651]: I1011 04:53:30.873440 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Oct 11 04:53:30 crc kubenswrapper[4651]: I1011 04:53:30.873568 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Oct 11 04:53:31 crc kubenswrapper[4651]: I1011 04:53:31.870292 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:53:31 crc kubenswrapper[4651]: I1011 04:53:31.870773 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:53:31 crc kubenswrapper[4651]: I1011 04:53:31.873184 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Oct 11 04:53:31 crc kubenswrapper[4651]: I1011 04:53:31.876080 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.352522 4651 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.412808 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-pkhrx"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.413578 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pkhrx" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.417263 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.418983 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ws6wr"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.419665 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ws6wr" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.421394 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.421494 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.421812 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.423318 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.423513 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2jmxt"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.424042 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2jmxt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.425418 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.425950 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.426307 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.426332 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.426464 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.426671 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.427686 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2hhkn"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.429206 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.429613 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.429653 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.429987 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.430038 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.433460 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-2hhkn" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.438896 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-s9qnd"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.439647 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-2mj56"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.440634 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-2mj56" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.440895 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-2f67z"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.441279 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-s9qnd" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.442464 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2f67z" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.446875 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.447182 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.447210 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.447433 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.447433 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.447974 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.450087 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-n4hfz"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.451039 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.451257 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.452191 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.454232 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rgxlz"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.455033 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rgxlz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.473526 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.475494 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.475516 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.475663 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.475758 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.475958 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.476058 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.476198 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.476279 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.476488 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.476494 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.476723 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.476729 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.479365 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.479472 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.479746 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.482212 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.484108 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.484505 4651 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.484615 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.484919 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.485076 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.485495 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.485543 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.485657 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.485747 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.485812 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.486185 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.486197 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.486446 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.487032 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-mdhlw"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.487365 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.488024 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.488184 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.488229 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.488291 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.488461 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.488527 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.488924 
4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.488972 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-mdhlw" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.504736 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.511042 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-pc67v"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.511706 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-htjqx"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.512262 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-htjqx" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.512637 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-pc67v" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.514023 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-h9rlq"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.514891 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-h9rlq" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.516357 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.520911 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4lsqd"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.521418 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4lsqd" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.524356 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zqjmp"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.524857 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-thfrv"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.524998 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.525163 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-thfrv" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.525451 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.526522 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-2lxzd"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.526931 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-2lxzd" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.527486 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-27wgh"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.527811 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-27wgh" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.528237 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.528367 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.528540 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.528626 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.528702 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.528780 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.528902 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.530397 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.532580 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.532681 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-zbqhj"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.533495 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zbqhj" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.534360 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.534977 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.536691 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.536756 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.536913 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.536953 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.536995 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.537032 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.537068 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.537170 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.537267 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.537300 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.537331 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.537621 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.544220 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-v5ktt"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.544954 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-p8tts"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.545544 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-p8tts" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.546353 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.549756 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9jbmn"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.550387 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.550696 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9jbmn" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.553519 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.554092 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.554123 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.554332 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.555056 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.560123 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.560925 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49755973-5d14-4c72-9858-7edca1f2c2ee-config\") pod \"apiserver-76f77b778f-n4hfz\" (UID: \"49755973-5d14-4c72-9858-7edca1f2c2ee\") " pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.560970 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/49755973-5d14-4c72-9858-7edca1f2c2ee-encryption-config\") pod \"apiserver-76f77b778f-n4hfz\" (UID: \"49755973-5d14-4c72-9858-7edca1f2c2ee\") " pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.560995 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/5fb1ac4f-b0d1-4314-ac06-d887654fa3f5-machine-approver-tls\") pod \"machine-approver-56656f9798-2f67z\" (UID: \"5fb1ac4f-b0d1-4314-ac06-d887654fa3f5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2f67z" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.561023 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/49755973-5d14-4c72-9858-7edca1f2c2ee-serving-cert\") pod \"apiserver-76f77b778f-n4hfz\" (UID: \"49755973-5d14-4c72-9858-7edca1f2c2ee\") " pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:38 crc 
kubenswrapper[4651]: I1011 04:53:38.569811 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.569900 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.570002 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.568522 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5fb1ac4f-b0d1-4314-ac06-d887654fa3f5-auth-proxy-config\") pod \"machine-approver-56656f9798-2f67z\" (UID: \"5fb1ac4f-b0d1-4314-ac06-d887654fa3f5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2f67z" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.570112 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7bafdf97-6219-440a-a3b0-49c55c2a3b5b-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-ws6wr\" (UID: \"7bafdf97-6219-440a-a3b0-49c55c2a3b5b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ws6wr" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.570183 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f90412cd-2f6a-4322-b3b7-29904de3b09c-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-xzfcs\" (UID: \"f90412cd-2f6a-4322-b3b7-29904de3b09c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.570227 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2797c45d-e1d7-44d7-b936-44048593f540-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-2hhkn\" (UID: \"2797c45d-e1d7-44d7-b936-44048593f540\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2hhkn" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.570266 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b590e4d5-1684-4e2f-b5e9-8fbf00db4546-config\") pod \"machine-api-operator-5694c8668f-2mj56\" (UID: \"b590e4d5-1684-4e2f-b5e9-8fbf00db4546\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2mj56" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.570323 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jnx4\" (UniqueName: \"kubernetes.io/projected/27505683-e595-4855-8a29-aceee78542b6-kube-api-access-8jnx4\") pod \"route-controller-manager-6576b87f9c-pkhrx\" (UID: \"27505683-e595-4855-8a29-aceee78542b6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pkhrx" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.570564 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5fb1ac4f-b0d1-4314-ac06-d887654fa3f5-config\") pod \"machine-approver-56656f9798-2f67z\" (UID: 
\"5fb1ac4f-b0d1-4314-ac06-d887654fa3f5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2f67z" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.570639 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/27505683-e595-4855-8a29-aceee78542b6-client-ca\") pod \"route-controller-manager-6576b87f9c-pkhrx\" (UID: \"27505683-e595-4855-8a29-aceee78542b6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pkhrx" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.570663 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.570680 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f90412cd-2f6a-4322-b3b7-29904de3b09c-encryption-config\") pod \"apiserver-7bbb656c7d-xzfcs\" (UID: \"f90412cd-2f6a-4322-b3b7-29904de3b09c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.570714 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/49755973-5d14-4c72-9858-7edca1f2c2ee-audit\") pod \"apiserver-76f77b778f-n4hfz\" (UID: \"49755973-5d14-4c72-9858-7edca1f2c2ee\") " pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.570748 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/49755973-5d14-4c72-9858-7edca1f2c2ee-etcd-serving-ca\") pod \"apiserver-76f77b778f-n4hfz\" (UID: \"49755973-5d14-4c72-9858-7edca1f2c2ee\") " pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.570778 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/b590e4d5-1684-4e2f-b5e9-8fbf00db4546-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-2mj56\" (UID: \"b590e4d5-1684-4e2f-b5e9-8fbf00db4546\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2mj56" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.570845 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-445ts\" (UniqueName: \"kubernetes.io/projected/391e51dc-8070-40bf-ac61-33c1ef37c72b-kube-api-access-445ts\") pod \"cluster-samples-operator-665b6dd947-rgxlz\" (UID: \"391e51dc-8070-40bf-ac61-33c1ef37c72b\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rgxlz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.570893 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f90412cd-2f6a-4322-b3b7-29904de3b09c-audit-policies\") pod \"apiserver-7bbb656c7d-xzfcs\" (UID: \"f90412cd-2f6a-4322-b3b7-29904de3b09c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.570933 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/aa722226-d2d2-4122-93fa-1aeee25b7868-serving-cert\") pod \"console-operator-58897d9998-mdhlw\" (UID: \"aa722226-d2d2-4122-93fa-1aeee25b7868\") " pod="openshift-console-operator/console-operator-58897d9998-mdhlw" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.571009 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f90412cd-2f6a-4322-b3b7-29904de3b09c-etcd-client\") pod \"apiserver-7bbb656c7d-xzfcs\" (UID: \"f90412cd-2f6a-4322-b3b7-29904de3b09c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.571045 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbpll\" (UniqueName: \"kubernetes.io/projected/f90412cd-2f6a-4322-b3b7-29904de3b09c-kube-api-access-nbpll\") pod \"apiserver-7bbb656c7d-xzfcs\" (UID: \"f90412cd-2f6a-4322-b3b7-29904de3b09c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.571130 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.571320 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2797c45d-e1d7-44d7-b936-44048593f540-config\") pod \"controller-manager-879f6c89f-2hhkn\" (UID: \"2797c45d-e1d7-44d7-b936-44048593f540\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2hhkn" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.571375 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/49755973-5d14-4c72-9858-7edca1f2c2ee-node-pullsecrets\") pod \"apiserver-76f77b778f-n4hfz\" (UID: \"49755973-5d14-4c72-9858-7edca1f2c2ee\") " pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.571418 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g9k8p\" (UniqueName: \"kubernetes.io/projected/5c2e6635-02f1-4869-9d20-7577116611ba-kube-api-access-g9k8p\") pod \"downloads-7954f5f757-s9qnd\" (UID: \"5c2e6635-02f1-4869-9d20-7577116611ba\") " pod="openshift-console/downloads-7954f5f757-s9qnd" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.571449 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b590e4d5-1684-4e2f-b5e9-8fbf00db4546-images\") pod \"machine-api-operator-5694c8668f-2mj56\" (UID: \"b590e4d5-1684-4e2f-b5e9-8fbf00db4546\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2mj56" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.571502 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/67e0d9bc-63c7-4509-b804-d63705caa189-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-2jmxt\" (UID: \"67e0d9bc-63c7-4509-b804-d63705caa189\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2jmxt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.571562 4651 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ql7bg\" (UniqueName: \"kubernetes.io/projected/49755973-5d14-4c72-9858-7edca1f2c2ee-kube-api-access-ql7bg\") pod \"apiserver-76f77b778f-n4hfz\" (UID: \"49755973-5d14-4c72-9858-7edca1f2c2ee\") " pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.571587 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.571595 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7d852\" (UniqueName: \"kubernetes.io/projected/7bafdf97-6219-440a-a3b0-49c55c2a3b5b-kube-api-access-7d852\") pod \"openshift-apiserver-operator-796bbdcf4f-ws6wr\" (UID: \"7bafdf97-6219-440a-a3b0-49c55c2a3b5b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ws6wr" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.571755 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67e0d9bc-63c7-4509-b804-d63705caa189-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-2jmxt\" (UID: \"67e0d9bc-63c7-4509-b804-d63705caa189\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2jmxt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.571804 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/391e51dc-8070-40bf-ac61-33c1ef37c72b-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-rgxlz\" (UID: \"391e51dc-8070-40bf-ac61-33c1ef37c72b\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rgxlz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.571977 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2797c45d-e1d7-44d7-b936-44048593f540-serving-cert\") pod \"controller-manager-879f6c89f-2hhkn\" (UID: \"2797c45d-e1d7-44d7-b936-44048593f540\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2hhkn" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.572021 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/27505683-e595-4855-8a29-aceee78542b6-serving-cert\") pod \"route-controller-manager-6576b87f9c-pkhrx\" (UID: \"27505683-e595-4855-8a29-aceee78542b6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pkhrx" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.572133 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5r9g\" (UniqueName: \"kubernetes.io/projected/2797c45d-e1d7-44d7-b936-44048593f540-kube-api-access-p5r9g\") pod \"controller-manager-879f6c89f-2hhkn\" (UID: \"2797c45d-e1d7-44d7-b936-44048593f540\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2hhkn" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.572173 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/aa722226-d2d2-4122-93fa-1aeee25b7868-config\") pod \"console-operator-58897d9998-mdhlw\" (UID: \"aa722226-d2d2-4122-93fa-1aeee25b7868\") " pod="openshift-console-operator/console-operator-58897d9998-mdhlw" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.572207 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfr6b\" (UniqueName: \"kubernetes.io/projected/67e0d9bc-63c7-4509-b804-d63705caa189-kube-api-access-tfr6b\") pod \"openshift-controller-manager-operator-756b6f6bc6-2jmxt\" (UID: \"67e0d9bc-63c7-4509-b804-d63705caa189\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2jmxt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.572270 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9d9m\" (UniqueName: \"kubernetes.io/projected/5fb1ac4f-b0d1-4314-ac06-d887654fa3f5-kube-api-access-q9d9m\") pod \"machine-approver-56656f9798-2f67z\" (UID: \"5fb1ac4f-b0d1-4314-ac06-d887654fa3f5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2f67z" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.572321 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f90412cd-2f6a-4322-b3b7-29904de3b09c-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-xzfcs\" (UID: \"f90412cd-2f6a-4322-b3b7-29904de3b09c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.572356 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49755973-5d14-4c72-9858-7edca1f2c2ee-trusted-ca-bundle\") pod \"apiserver-76f77b778f-n4hfz\" (UID: \"49755973-5d14-4c72-9858-7edca1f2c2ee\") " pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.572467 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f90412cd-2f6a-4322-b3b7-29904de3b09c-serving-cert\") pod \"apiserver-7bbb656c7d-xzfcs\" (UID: \"f90412cd-2f6a-4322-b3b7-29904de3b09c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.572542 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27505683-e595-4855-8a29-aceee78542b6-config\") pod \"route-controller-manager-6576b87f9c-pkhrx\" (UID: \"27505683-e595-4855-8a29-aceee78542b6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pkhrx" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.572566 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.572594 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7bafdf97-6219-440a-a3b0-49c55c2a3b5b-config\") pod \"openshift-apiserver-operator-796bbdcf4f-ws6wr\" (UID: \"7bafdf97-6219-440a-a3b0-49c55c2a3b5b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ws6wr" Oct 11 04:53:38 crc 
kubenswrapper[4651]: I1011 04:53:38.572655 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2797c45d-e1d7-44d7-b936-44048593f540-client-ca\") pod \"controller-manager-879f6c89f-2hhkn\" (UID: \"2797c45d-e1d7-44d7-b936-44048593f540\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2hhkn" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.572760 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/49755973-5d14-4c72-9858-7edca1f2c2ee-image-import-ca\") pod \"apiserver-76f77b778f-n4hfz\" (UID: \"49755973-5d14-4c72-9858-7edca1f2c2ee\") " pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.572814 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/49755973-5d14-4c72-9858-7edca1f2c2ee-etcd-client\") pod \"apiserver-76f77b778f-n4hfz\" (UID: \"49755973-5d14-4c72-9858-7edca1f2c2ee\") " pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.572858 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fkp6g\" (UniqueName: \"kubernetes.io/projected/aa722226-d2d2-4122-93fa-1aeee25b7868-kube-api-access-fkp6g\") pod \"console-operator-58897d9998-mdhlw\" (UID: \"aa722226-d2d2-4122-93fa-1aeee25b7868\") " pod="openshift-console-operator/console-operator-58897d9998-mdhlw" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.572901 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f90412cd-2f6a-4322-b3b7-29904de3b09c-audit-dir\") pod \"apiserver-7bbb656c7d-xzfcs\" (UID: \"f90412cd-2f6a-4322-b3b7-29904de3b09c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.572968 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46v55\" (UniqueName: \"kubernetes.io/projected/b590e4d5-1684-4e2f-b5e9-8fbf00db4546-kube-api-access-46v55\") pod \"machine-api-operator-5694c8668f-2mj56\" (UID: \"b590e4d5-1684-4e2f-b5e9-8fbf00db4546\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2mj56" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.573020 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/aa722226-d2d2-4122-93fa-1aeee25b7868-trusted-ca\") pod \"console-operator-58897d9998-mdhlw\" (UID: \"aa722226-d2d2-4122-93fa-1aeee25b7868\") " pod="openshift-console-operator/console-operator-58897d9998-mdhlw" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.573073 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/49755973-5d14-4c72-9858-7edca1f2c2ee-audit-dir\") pod \"apiserver-76f77b778f-n4hfz\" (UID: \"49755973-5d14-4c72-9858-7edca1f2c2ee\") " pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.573936 4651 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.574486 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-7tk5h"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.577554 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7tk5h" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.582269 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-xqg79"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.589290 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.590442 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xqg79" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.591706 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.592079 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-pxhmv"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.592962 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-pxhmv" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.593946 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.596499 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.597365 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.604672 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hf4jk"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.608911 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6qdd5"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.609345 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6qdd5" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.609594 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hf4jk" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.610591 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-pkhrx"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.611425 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-cd92z"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.611977 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-cd92z" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.615460 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-k85tm"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.615847 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.622372 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-k85tm" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.624542 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-r4vsk"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.625583 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-r4vsk" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.631234 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-2mj6l"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.632240 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-2mj6l" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.632265 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335965-cqjjn"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.632898 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335965-cqjjn" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.637410 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-97x45"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.638700 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jkbfd"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.639101 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-97fqd"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.639774 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.639955 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-f7bf8"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.640003 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-97x45" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.639975 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-97fqd" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.640266 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jkbfd" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.641062 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-f7bf8" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.641297 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2q6fr"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.641836 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2q6fr" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.642277 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ws6wr"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.643196 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2hhkn"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.645222 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2jmxt"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.645281 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rgxlz"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.646250 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.647251 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-pc67v"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.648376 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-n4hfz"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.651345 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-9qvz2"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.652145 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-9qvz2" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.652465 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-h9rlq"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.654093 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-s9qnd"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.655133 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-2mj56"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.655418 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.660263 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-p8tts"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.660319 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-zbqhj"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.666779 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-2lxzd"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.668110 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9jbmn"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.669553 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-thfrv"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.671426 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-xqg79"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.673247 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zqjmp"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.673325 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6qdd5"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674266 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-htjqx"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674310 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f90412cd-2f6a-4322-b3b7-29904de3b09c-etcd-client\") pod \"apiserver-7bbb656c7d-xzfcs\" (UID: \"f90412cd-2f6a-4322-b3b7-29904de3b09c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674339 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4c2p\" (UniqueName: \"kubernetes.io/projected/d32c16ff-cc90-4759-a695-405d76694b39-kube-api-access-d4c2p\") pod \"kube-storage-version-migrator-operator-b67b599dd-thfrv\" (UID: \"d32c16ff-cc90-4759-a695-405d76694b39\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-thfrv" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674361 
4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/49755973-5d14-4c72-9858-7edca1f2c2ee-node-pullsecrets\") pod \"apiserver-76f77b778f-n4hfz\" (UID: \"49755973-5d14-4c72-9858-7edca1f2c2ee\") " pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674381 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6qvd\" (UniqueName: \"kubernetes.io/projected/d05dd104-27f8-410b-8a71-68101c58d906-kube-api-access-t6qvd\") pod \"dns-operator-744455d44c-p8tts\" (UID: \"d05dd104-27f8-410b-8a71-68101c58d906\") " pod="openshift-dns-operator/dns-operator-744455d44c-p8tts" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674400 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674418 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbpll\" (UniqueName: \"kubernetes.io/projected/f90412cd-2f6a-4322-b3b7-29904de3b09c-kube-api-access-nbpll\") pod \"apiserver-7bbb656c7d-xzfcs\" (UID: \"f90412cd-2f6a-4322-b3b7-29904de3b09c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674436 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2797c45d-e1d7-44d7-b936-44048593f540-config\") pod \"controller-manager-879f6c89f-2hhkn\" (UID: \"2797c45d-e1d7-44d7-b936-44048593f540\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2hhkn" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674452 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/67e0d9bc-63c7-4509-b804-d63705caa189-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-2jmxt\" (UID: \"67e0d9bc-63c7-4509-b804-d63705caa189\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2jmxt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674470 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d05dd104-27f8-410b-8a71-68101c58d906-metrics-tls\") pod \"dns-operator-744455d44c-p8tts\" (UID: \"d05dd104-27f8-410b-8a71-68101c58d906\") " pod="openshift-dns-operator/dns-operator-744455d44c-p8tts" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674487 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/107de3f1-b5a8-41e4-bb3b-a34e4e916390-audit-dir\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674511 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" 
(UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674531 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g9k8p\" (UniqueName: \"kubernetes.io/projected/5c2e6635-02f1-4869-9d20-7577116611ba-kube-api-access-g9k8p\") pod \"downloads-7954f5f757-s9qnd\" (UID: \"5c2e6635-02f1-4869-9d20-7577116611ba\") " pod="openshift-console/downloads-7954f5f757-s9qnd" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674534 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/49755973-5d14-4c72-9858-7edca1f2c2ee-node-pullsecrets\") pod \"apiserver-76f77b778f-n4hfz\" (UID: \"49755973-5d14-4c72-9858-7edca1f2c2ee\") " pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674547 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b590e4d5-1684-4e2f-b5e9-8fbf00db4546-images\") pod \"machine-api-operator-5694c8668f-2mj56\" (UID: \"b590e4d5-1684-4e2f-b5e9-8fbf00db4546\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2mj56" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674570 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7d852\" (UniqueName: \"kubernetes.io/projected/7bafdf97-6219-440a-a3b0-49c55c2a3b5b-kube-api-access-7d852\") pod \"openshift-apiserver-operator-796bbdcf4f-ws6wr\" (UID: \"7bafdf97-6219-440a-a3b0-49c55c2a3b5b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ws6wr" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674590 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ql7bg\" (UniqueName: \"kubernetes.io/projected/49755973-5d14-4c72-9858-7edca1f2c2ee-kube-api-access-ql7bg\") pod \"apiserver-76f77b778f-n4hfz\" (UID: \"49755973-5d14-4c72-9858-7edca1f2c2ee\") " pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674608 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67e0d9bc-63c7-4509-b804-d63705caa189-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-2jmxt\" (UID: \"67e0d9bc-63c7-4509-b804-d63705caa189\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2jmxt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674625 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0195bd92-5ff5-4c4b-86a6-360d9620f118-srv-cert\") pod \"catalog-operator-68c6474976-6qdd5\" (UID: \"0195bd92-5ff5-4c4b-86a6-360d9620f118\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6qdd5" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674643 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/56d26781-8236-4a07-9dbf-d0b926cba29a-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9jbmn\" (UID: 
\"56d26781-8236-4a07-9dbf-d0b926cba29a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9jbmn" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674661 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/391e51dc-8070-40bf-ac61-33c1ef37c72b-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-rgxlz\" (UID: \"391e51dc-8070-40bf-ac61-33c1ef37c72b\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rgxlz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674679 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/29ad03db-a1b8-4cf1-b603-d9e1e61359db-webhook-cert\") pod \"packageserver-d55dfcdfc-hf4jk\" (UID: \"29ad03db-a1b8-4cf1-b603-d9e1e61359db\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hf4jk" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674702 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/27505683-e595-4855-8a29-aceee78542b6-serving-cert\") pod \"route-controller-manager-6576b87f9c-pkhrx\" (UID: \"27505683-e595-4855-8a29-aceee78542b6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pkhrx" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674721 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ccf042b9-768a-413d-bc29-58ab74c06fc9-console-serving-cert\") pod \"console-f9d7485db-pc67v\" (UID: \"ccf042b9-768a-413d-bc29-58ab74c06fc9\") " pod="openshift-console/console-f9d7485db-pc67v" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674737 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ccf042b9-768a-413d-bc29-58ab74c06fc9-service-ca\") pod \"console-f9d7485db-pc67v\" (UID: \"ccf042b9-768a-413d-bc29-58ab74c06fc9\") " pod="openshift-console/console-f9d7485db-pc67v" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674754 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/e011176f-c96e-4823-89f6-648d574d1ef4-default-certificate\") pod \"router-default-5444994796-pxhmv\" (UID: \"e011176f-c96e-4823-89f6-648d574d1ef4\") " pod="openshift-ingress/router-default-5444994796-pxhmv" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674776 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2797c45d-e1d7-44d7-b936-44048593f540-serving-cert\") pod \"controller-manager-879f6c89f-2hhkn\" (UID: \"2797c45d-e1d7-44d7-b936-44048593f540\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2hhkn" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674796 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674826 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674846 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56d26781-8236-4a07-9dbf-d0b926cba29a-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9jbmn\" (UID: \"56d26781-8236-4a07-9dbf-d0b926cba29a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9jbmn" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674864 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5r9g\" (UniqueName: \"kubernetes.io/projected/2797c45d-e1d7-44d7-b936-44048593f540-kube-api-access-p5r9g\") pod \"controller-manager-879f6c89f-2hhkn\" (UID: \"2797c45d-e1d7-44d7-b936-44048593f540\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2hhkn" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674881 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/88ee50e1-3036-4557-8dbb-6aefcc8df336-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-htjqx\" (UID: \"88ee50e1-3036-4557-8dbb-6aefcc8df336\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-htjqx" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674918 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa722226-d2d2-4122-93fa-1aeee25b7868-config\") pod \"console-operator-58897d9998-mdhlw\" (UID: \"aa722226-d2d2-4122-93fa-1aeee25b7868\") " pod="openshift-console-operator/console-operator-58897d9998-mdhlw" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674937 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674955 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfr6b\" (UniqueName: \"kubernetes.io/projected/67e0d9bc-63c7-4509-b804-d63705caa189-kube-api-access-tfr6b\") pod \"openshift-controller-manager-operator-756b6f6bc6-2jmxt\" (UID: \"67e0d9bc-63c7-4509-b804-d63705caa189\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2jmxt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674971 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ccf042b9-768a-413d-bc29-58ab74c06fc9-console-oauth-config\") pod \"console-f9d7485db-pc67v\" (UID: 
\"ccf042b9-768a-413d-bc29-58ab74c06fc9\") " pod="openshift-console/console-f9d7485db-pc67v" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.674987 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.675002 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.675019 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9d9m\" (UniqueName: \"kubernetes.io/projected/5fb1ac4f-b0d1-4314-ac06-d887654fa3f5-kube-api-access-q9d9m\") pod \"machine-approver-56656f9798-2f67z\" (UID: \"5fb1ac4f-b0d1-4314-ac06-d887654fa3f5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2f67z" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.675035 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mpkq7\" (UniqueName: \"kubernetes.io/projected/0195bd92-5ff5-4c4b-86a6-360d9620f118-kube-api-access-mpkq7\") pod \"catalog-operator-68c6474976-6qdd5\" (UID: \"0195bd92-5ff5-4c4b-86a6-360d9620f118\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6qdd5" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.675052 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.675069 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f90412cd-2f6a-4322-b3b7-29904de3b09c-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-xzfcs\" (UID: \"f90412cd-2f6a-4322-b3b7-29904de3b09c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.675087 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f90412cd-2f6a-4322-b3b7-29904de3b09c-serving-cert\") pod \"apiserver-7bbb656c7d-xzfcs\" (UID: \"f90412cd-2f6a-4322-b3b7-29904de3b09c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.675102 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27505683-e595-4855-8a29-aceee78542b6-config\") pod \"route-controller-manager-6576b87f9c-pkhrx\" (UID: \"27505683-e595-4855-8a29-aceee78542b6\") " 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pkhrx" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.675118 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49755973-5d14-4c72-9858-7edca1f2c2ee-trusted-ca-bundle\") pod \"apiserver-76f77b778f-n4hfz\" (UID: \"49755973-5d14-4c72-9858-7edca1f2c2ee\") " pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.675136 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7bafdf97-6219-440a-a3b0-49c55c2a3b5b-config\") pod \"openshift-apiserver-operator-796bbdcf4f-ws6wr\" (UID: \"7bafdf97-6219-440a-a3b0-49c55c2a3b5b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ws6wr" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.675152 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/29ad03db-a1b8-4cf1-b603-d9e1e61359db-tmpfs\") pod \"packageserver-d55dfcdfc-hf4jk\" (UID: \"29ad03db-a1b8-4cf1-b603-d9e1e61359db\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hf4jk" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.675168 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qw8z2\" (UniqueName: \"kubernetes.io/projected/29ad03db-a1b8-4cf1-b603-d9e1e61359db-kube-api-access-qw8z2\") pod \"packageserver-d55dfcdfc-hf4jk\" (UID: \"29ad03db-a1b8-4cf1-b603-d9e1e61359db\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hf4jk" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.675185 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e011176f-c96e-4823-89f6-648d574d1ef4-metrics-certs\") pod \"router-default-5444994796-pxhmv\" (UID: \"e011176f-c96e-4823-89f6-648d574d1ef4\") " pod="openshift-ingress/router-default-5444994796-pxhmv" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.675201 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/29ad03db-a1b8-4cf1-b603-d9e1e61359db-apiservice-cert\") pod \"packageserver-d55dfcdfc-hf4jk\" (UID: \"29ad03db-a1b8-4cf1-b603-d9e1e61359db\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hf4jk" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.675217 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2797c45d-e1d7-44d7-b936-44048593f540-client-ca\") pod \"controller-manager-879f6c89f-2hhkn\" (UID: \"2797c45d-e1d7-44d7-b936-44048593f540\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2hhkn" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.675235 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/49755973-5d14-4c72-9858-7edca1f2c2ee-image-import-ca\") pod \"apiserver-76f77b778f-n4hfz\" (UID: \"49755973-5d14-4c72-9858-7edca1f2c2ee\") " pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.675254 4651 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-fkp6g\" (UniqueName: \"kubernetes.io/projected/aa722226-d2d2-4122-93fa-1aeee25b7868-kube-api-access-fkp6g\") pod \"console-operator-58897d9998-mdhlw\" (UID: \"aa722226-d2d2-4122-93fa-1aeee25b7868\") " pod="openshift-console-operator/console-operator-58897d9998-mdhlw" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.675279 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/49755973-5d14-4c72-9858-7edca1f2c2ee-etcd-client\") pod \"apiserver-76f77b778f-n4hfz\" (UID: \"49755973-5d14-4c72-9858-7edca1f2c2ee\") " pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.675297 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46v55\" (UniqueName: \"kubernetes.io/projected/b590e4d5-1684-4e2f-b5e9-8fbf00db4546-kube-api-access-46v55\") pod \"machine-api-operator-5694c8668f-2mj56\" (UID: \"b590e4d5-1684-4e2f-b5e9-8fbf00db4546\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2mj56" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.675315 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/aa722226-d2d2-4122-93fa-1aeee25b7868-trusted-ca\") pod \"console-operator-58897d9998-mdhlw\" (UID: \"aa722226-d2d2-4122-93fa-1aeee25b7868\") " pod="openshift-console-operator/console-operator-58897d9998-mdhlw" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.675366 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0195bd92-5ff5-4c4b-86a6-360d9620f118-profile-collector-cert\") pod \"catalog-operator-68c6474976-6qdd5\" (UID: \"0195bd92-5ff5-4c4b-86a6-360d9620f118\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6qdd5" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.675394 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f90412cd-2f6a-4322-b3b7-29904de3b09c-audit-dir\") pod \"apiserver-7bbb656c7d-xzfcs\" (UID: \"f90412cd-2f6a-4322-b3b7-29904de3b09c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.675416 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ccf042b9-768a-413d-bc29-58ab74c06fc9-console-config\") pod \"console-f9d7485db-pc67v\" (UID: \"ccf042b9-768a-413d-bc29-58ab74c06fc9\") " pod="openshift-console/console-f9d7485db-pc67v" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.675437 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/49755973-5d14-4c72-9858-7edca1f2c2ee-audit-dir\") pod \"apiserver-76f77b778f-n4hfz\" (UID: \"49755973-5d14-4c72-9858-7edca1f2c2ee\") " pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.675735 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-2mj6l"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.676256 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-console-operator/console-operator-58897d9998-mdhlw"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.676305 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.677079 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f90412cd-2f6a-4322-b3b7-29904de3b09c-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-xzfcs\" (UID: \"f90412cd-2f6a-4322-b3b7-29904de3b09c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.677313 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4lsqd"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.677620 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2797c45d-e1d7-44d7-b936-44048593f540-config\") pod \"controller-manager-879f6c89f-2hhkn\" (UID: \"2797c45d-e1d7-44d7-b936-44048593f540\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2hhkn" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.677855 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b590e4d5-1684-4e2f-b5e9-8fbf00db4546-images\") pod \"machine-api-operator-5694c8668f-2mj56\" (UID: \"b590e4d5-1684-4e2f-b5e9-8fbf00db4546\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2mj56" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.677882 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49755973-5d14-4c72-9858-7edca1f2c2ee-trusted-ca-bundle\") pod \"apiserver-76f77b778f-n4hfz\" (UID: \"49755973-5d14-4c72-9858-7edca1f2c2ee\") " pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.678031 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27505683-e595-4855-8a29-aceee78542b6-config\") pod \"route-controller-manager-6576b87f9c-pkhrx\" (UID: \"27505683-e595-4855-8a29-aceee78542b6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pkhrx" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.678578 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67e0d9bc-63c7-4509-b804-d63705caa189-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-2jmxt\" (UID: \"67e0d9bc-63c7-4509-b804-d63705caa189\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2jmxt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.679088 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa722226-d2d2-4122-93fa-1aeee25b7868-config\") pod \"console-operator-58897d9998-mdhlw\" (UID: \"aa722226-d2d2-4122-93fa-1aeee25b7868\") " pod="openshift-console-operator/console-operator-58897d9998-mdhlw" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.679135 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-r4vsk"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.679340 4651 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2797c45d-e1d7-44d7-b936-44048593f540-client-ca\") pod \"controller-manager-879f6c89f-2hhkn\" (UID: \"2797c45d-e1d7-44d7-b936-44048593f540\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2hhkn" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.679722 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-27wgh"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.679897 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/49755973-5d14-4c72-9858-7edca1f2c2ee-image-import-ca\") pod \"apiserver-76f77b778f-n4hfz\" (UID: \"49755973-5d14-4c72-9858-7edca1f2c2ee\") " pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.680439 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7bafdf97-6219-440a-a3b0-49c55c2a3b5b-config\") pod \"openshift-apiserver-operator-796bbdcf4f-ws6wr\" (UID: \"7bafdf97-6219-440a-a3b0-49c55c2a3b5b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ws6wr" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.680944 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d32c16ff-cc90-4759-a695-405d76694b39-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-thfrv\" (UID: \"d32c16ff-cc90-4759-a695-405d76694b39\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-thfrv" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681018 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/49755973-5d14-4c72-9858-7edca1f2c2ee-audit-dir\") pod \"apiserver-76f77b778f-n4hfz\" (UID: \"49755973-5d14-4c72-9858-7edca1f2c2ee\") " pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681042 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681058 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f90412cd-2f6a-4322-b3b7-29904de3b09c-audit-dir\") pod \"apiserver-7bbb656c7d-xzfcs\" (UID: \"f90412cd-2f6a-4322-b3b7-29904de3b09c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681102 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-psnct\" (UniqueName: \"kubernetes.io/projected/ccf042b9-768a-413d-bc29-58ab74c06fc9-kube-api-access-psnct\") pod \"console-f9d7485db-pc67v\" (UID: \"ccf042b9-768a-413d-bc29-58ab74c06fc9\") " pod="openshift-console/console-f9d7485db-pc67v" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681127 4651 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2797c45d-e1d7-44d7-b936-44048593f540-serving-cert\") pod \"controller-manager-879f6c89f-2hhkn\" (UID: \"2797c45d-e1d7-44d7-b936-44048593f540\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2hhkn" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681171 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-f7bf8"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681215 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/49755973-5d14-4c72-9858-7edca1f2c2ee-encryption-config\") pod \"apiserver-76f77b778f-n4hfz\" (UID: \"49755973-5d14-4c72-9858-7edca1f2c2ee\") " pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681271 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/5fb1ac4f-b0d1-4314-ac06-d887654fa3f5-machine-approver-tls\") pod \"machine-approver-56656f9798-2f67z\" (UID: \"5fb1ac4f-b0d1-4314-ac06-d887654fa3f5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2f67z" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681301 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ccf042b9-768a-413d-bc29-58ab74c06fc9-oauth-serving-cert\") pod \"console-f9d7485db-pc67v\" (UID: \"ccf042b9-768a-413d-bc29-58ab74c06fc9\") " pod="openshift-console/console-f9d7485db-pc67v" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681329 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e011176f-c96e-4823-89f6-648d574d1ef4-service-ca-bundle\") pod \"router-default-5444994796-pxhmv\" (UID: \"e011176f-c96e-4823-89f6-648d574d1ef4\") " pod="openshift-ingress/router-default-5444994796-pxhmv" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681378 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49755973-5d14-4c72-9858-7edca1f2c2ee-config\") pod \"apiserver-76f77b778f-n4hfz\" (UID: \"49755973-5d14-4c72-9858-7edca1f2c2ee\") " pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681400 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/88ee50e1-3036-4557-8dbb-6aefcc8df336-service-ca-bundle\") pod \"authentication-operator-69f744f599-htjqx\" (UID: \"88ee50e1-3036-4557-8dbb-6aefcc8df336\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-htjqx" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681408 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/aa722226-d2d2-4122-93fa-1aeee25b7868-trusted-ca\") pod \"console-operator-58897d9998-mdhlw\" (UID: \"aa722226-d2d2-4122-93fa-1aeee25b7868\") " pod="openshift-console-operator/console-operator-58897d9998-mdhlw" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681420 4651 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/49755973-5d14-4c72-9858-7edca1f2c2ee-serving-cert\") pod \"apiserver-76f77b778f-n4hfz\" (UID: \"49755973-5d14-4c72-9858-7edca1f2c2ee\") " pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681443 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5fb1ac4f-b0d1-4314-ac06-d887654fa3f5-auth-proxy-config\") pod \"machine-approver-56656f9798-2f67z\" (UID: \"5fb1ac4f-b0d1-4314-ac06-d887654fa3f5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2f67z" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681466 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7bafdf97-6219-440a-a3b0-49c55c2a3b5b-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-ws6wr\" (UID: \"7bafdf97-6219-440a-a3b0-49c55c2a3b5b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ws6wr" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681487 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qtrnx\" (UniqueName: \"kubernetes.io/projected/e011176f-c96e-4823-89f6-648d574d1ef4-kube-api-access-qtrnx\") pod \"router-default-5444994796-pxhmv\" (UID: \"e011176f-c96e-4823-89f6-648d574d1ef4\") " pod="openshift-ingress/router-default-5444994796-pxhmv" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681523 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/56d26781-8236-4a07-9dbf-d0b926cba29a-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9jbmn\" (UID: \"56d26781-8236-4a07-9dbf-d0b926cba29a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9jbmn" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681546 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/107de3f1-b5a8-41e4-bb3b-a34e4e916390-audit-policies\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681564 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681586 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkfhz\" (UniqueName: \"kubernetes.io/projected/88ee50e1-3036-4557-8dbb-6aefcc8df336-kube-api-access-xkfhz\") pod \"authentication-operator-69f744f599-htjqx\" (UID: \"88ee50e1-3036-4557-8dbb-6aefcc8df336\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-htjqx" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681611 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f90412cd-2f6a-4322-b3b7-29904de3b09c-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-xzfcs\" (UID: \"f90412cd-2f6a-4322-b3b7-29904de3b09c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681638 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2797c45d-e1d7-44d7-b936-44048593f540-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-2hhkn\" (UID: \"2797c45d-e1d7-44d7-b936-44048593f540\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2hhkn" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681658 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b590e4d5-1684-4e2f-b5e9-8fbf00db4546-config\") pod \"machine-api-operator-5694c8668f-2mj56\" (UID: \"b590e4d5-1684-4e2f-b5e9-8fbf00db4546\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2mj56" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681682 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jnx4\" (UniqueName: \"kubernetes.io/projected/27505683-e595-4855-8a29-aceee78542b6-kube-api-access-8jnx4\") pod \"route-controller-manager-6576b87f9c-pkhrx\" (UID: \"27505683-e595-4855-8a29-aceee78542b6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pkhrx" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681702 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ccf042b9-768a-413d-bc29-58ab74c06fc9-trusted-ca-bundle\") pod \"console-f9d7485db-pc67v\" (UID: \"ccf042b9-768a-413d-bc29-58ab74c06fc9\") " pod="openshift-console/console-f9d7485db-pc67v" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681719 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88ee50e1-3036-4557-8dbb-6aefcc8df336-config\") pod \"authentication-operator-69f744f599-htjqx\" (UID: \"88ee50e1-3036-4557-8dbb-6aefcc8df336\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-htjqx" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681744 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5fb1ac4f-b0d1-4314-ac06-d887654fa3f5-config\") pod \"machine-approver-56656f9798-2f67z\" (UID: \"5fb1ac4f-b0d1-4314-ac06-d887654fa3f5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2f67z" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681776 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/27505683-e595-4855-8a29-aceee78542b6-client-ca\") pod \"route-controller-manager-6576b87f9c-pkhrx\" (UID: \"27505683-e595-4855-8a29-aceee78542b6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pkhrx" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681795 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/88ee50e1-3036-4557-8dbb-6aefcc8df336-serving-cert\") pod \"authentication-operator-69f744f599-htjqx\" 
(UID: \"88ee50e1-3036-4557-8dbb-6aefcc8df336\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-htjqx" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681840 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f90412cd-2f6a-4322-b3b7-29904de3b09c-encryption-config\") pod \"apiserver-7bbb656c7d-xzfcs\" (UID: \"f90412cd-2f6a-4322-b3b7-29904de3b09c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681857 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/49755973-5d14-4c72-9858-7edca1f2c2ee-audit\") pod \"apiserver-76f77b778f-n4hfz\" (UID: \"49755973-5d14-4c72-9858-7edca1f2c2ee\") " pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681875 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/49755973-5d14-4c72-9858-7edca1f2c2ee-etcd-serving-ca\") pod \"apiserver-76f77b778f-n4hfz\" (UID: \"49755973-5d14-4c72-9858-7edca1f2c2ee\") " pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681894 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/49755973-5d14-4c72-9858-7edca1f2c2ee-config\") pod \"apiserver-76f77b778f-n4hfz\" (UID: \"49755973-5d14-4c72-9858-7edca1f2c2ee\") " pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.681903 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-445ts\" (UniqueName: \"kubernetes.io/projected/391e51dc-8070-40bf-ac61-33c1ef37c72b-kube-api-access-445ts\") pod \"cluster-samples-operator-665b6dd947-rgxlz\" (UID: \"391e51dc-8070-40bf-ac61-33c1ef37c72b\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rgxlz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.682955 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/b590e4d5-1684-4e2f-b5e9-8fbf00db4546-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-2mj56\" (UID: \"b590e4d5-1684-4e2f-b5e9-8fbf00db4546\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2mj56" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.682981 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f90412cd-2f6a-4322-b3b7-29904de3b09c-audit-policies\") pod \"apiserver-7bbb656c7d-xzfcs\" (UID: \"f90412cd-2f6a-4322-b3b7-29904de3b09c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.683007 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d32c16ff-cc90-4759-a695-405d76694b39-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-thfrv\" (UID: \"d32c16ff-cc90-4759-a695-405d76694b39\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-thfrv" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.683025 4651 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/e011176f-c96e-4823-89f6-648d574d1ef4-stats-auth\") pod \"router-default-5444994796-pxhmv\" (UID: \"e011176f-c96e-4823-89f6-648d574d1ef4\") " pod="openshift-ingress/router-default-5444994796-pxhmv" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.683047 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.683070 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wppnl\" (UniqueName: \"kubernetes.io/projected/107de3f1-b5a8-41e4-bb3b-a34e4e916390-kube-api-access-wppnl\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.683112 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aa722226-d2d2-4122-93fa-1aeee25b7868-serving-cert\") pod \"console-operator-58897d9998-mdhlw\" (UID: \"aa722226-d2d2-4122-93fa-1aeee25b7868\") " pod="openshift-console-operator/console-operator-58897d9998-mdhlw" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.683452 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5fb1ac4f-b0d1-4314-ac06-d887654fa3f5-config\") pod \"machine-approver-56656f9798-2f67z\" (UID: \"5fb1ac4f-b0d1-4314-ac06-d887654fa3f5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2f67z" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.682556 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b590e4d5-1684-4e2f-b5e9-8fbf00db4546-config\") pod \"machine-api-operator-5694c8668f-2mj56\" (UID: \"b590e4d5-1684-4e2f-b5e9-8fbf00db4546\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2mj56" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.683677 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/49755973-5d14-4c72-9858-7edca1f2c2ee-etcd-client\") pod \"apiserver-76f77b778f-n4hfz\" (UID: \"49755973-5d14-4c72-9858-7edca1f2c2ee\") " pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.683754 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/27505683-e595-4855-8a29-aceee78542b6-client-ca\") pod \"route-controller-manager-6576b87f9c-pkhrx\" (UID: \"27505683-e595-4855-8a29-aceee78542b6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pkhrx" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.683773 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f90412cd-2f6a-4322-b3b7-29904de3b09c-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-xzfcs\" (UID: 
\"f90412cd-2f6a-4322-b3b7-29904de3b09c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.682664 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/391e51dc-8070-40bf-ac61-33c1ef37c72b-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-rgxlz\" (UID: \"391e51dc-8070-40bf-ac61-33c1ef37c72b\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rgxlz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.682773 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-cd92z"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.683996 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-7tk5h"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.684266 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2797c45d-e1d7-44d7-b936-44048593f540-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-2hhkn\" (UID: \"2797c45d-e1d7-44d7-b936-44048593f540\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2hhkn" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.684456 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5fb1ac4f-b0d1-4314-ac06-d887654fa3f5-auth-proxy-config\") pod \"machine-approver-56656f9798-2f67z\" (UID: \"5fb1ac4f-b0d1-4314-ac06-d887654fa3f5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2f67z" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.684495 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/49755973-5d14-4c72-9858-7edca1f2c2ee-encryption-config\") pod \"apiserver-76f77b778f-n4hfz\" (UID: \"49755973-5d14-4c72-9858-7edca1f2c2ee\") " pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.684526 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f90412cd-2f6a-4322-b3b7-29904de3b09c-audit-policies\") pod \"apiserver-7bbb656c7d-xzfcs\" (UID: \"f90412cd-2f6a-4322-b3b7-29904de3b09c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.684538 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/49755973-5d14-4c72-9858-7edca1f2c2ee-audit\") pod \"apiserver-76f77b778f-n4hfz\" (UID: \"49755973-5d14-4c72-9858-7edca1f2c2ee\") " pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.684781 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-k85tm"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.684810 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/49755973-5d14-4c72-9858-7edca1f2c2ee-etcd-serving-ca\") pod \"apiserver-76f77b778f-n4hfz\" (UID: \"49755973-5d14-4c72-9858-7edca1f2c2ee\") " pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 
04:53:38.684862 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/27505683-e595-4855-8a29-aceee78542b6-serving-cert\") pod \"route-controller-manager-6576b87f9c-pkhrx\" (UID: \"27505683-e595-4855-8a29-aceee78542b6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pkhrx" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.684953 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/49755973-5d14-4c72-9858-7edca1f2c2ee-serving-cert\") pod \"apiserver-76f77b778f-n4hfz\" (UID: \"49755973-5d14-4c72-9858-7edca1f2c2ee\") " pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.685262 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f90412cd-2f6a-4322-b3b7-29904de3b09c-encryption-config\") pod \"apiserver-7bbb656c7d-xzfcs\" (UID: \"f90412cd-2f6a-4322-b3b7-29904de3b09c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.685556 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7bafdf97-6219-440a-a3b0-49c55c2a3b5b-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-ws6wr\" (UID: \"7bafdf97-6219-440a-a3b0-49c55c2a3b5b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ws6wr" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.686063 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f90412cd-2f6a-4322-b3b7-29904de3b09c-etcd-client\") pod \"apiserver-7bbb656c7d-xzfcs\" (UID: \"f90412cd-2f6a-4322-b3b7-29904de3b09c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.686158 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-97fqd"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.686343 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f90412cd-2f6a-4322-b3b7-29904de3b09c-serving-cert\") pod \"apiserver-7bbb656c7d-xzfcs\" (UID: \"f90412cd-2f6a-4322-b3b7-29904de3b09c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.687189 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/67e0d9bc-63c7-4509-b804-d63705caa189-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-2jmxt\" (UID: \"67e0d9bc-63c7-4509-b804-d63705caa189\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2jmxt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.687572 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/5fb1ac4f-b0d1-4314-ac06-d887654fa3f5-machine-approver-tls\") pod \"machine-approver-56656f9798-2f67z\" (UID: \"5fb1ac4f-b0d1-4314-ac06-d887654fa3f5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2f67z" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.687610 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-operator-lifecycle-manager/collect-profiles-29335965-cqjjn"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.688093 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/b590e4d5-1684-4e2f-b5e9-8fbf00db4546-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-2mj56\" (UID: \"b590e4d5-1684-4e2f-b5e9-8fbf00db4546\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2mj56" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.688455 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-xx5gz"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.688544 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aa722226-d2d2-4122-93fa-1aeee25b7868-serving-cert\") pod \"console-operator-58897d9998-mdhlw\" (UID: \"aa722226-d2d2-4122-93fa-1aeee25b7868\") " pod="openshift-console-operator/console-operator-58897d9998-mdhlw" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.689521 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-xx5gz" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.690504 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-w55bl"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.693707 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hf4jk"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.693729 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2q6fr"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.693804 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-w55bl" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.694062 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-v5ktt"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.700371 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.700540 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-9qvz2"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.701162 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-xx5gz"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.702230 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jkbfd"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.715844 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-97x45"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.716497 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-8fhsm"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.717592 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-8fhsm" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.718047 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.718121 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-8fhsm"] Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.736085 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.755617 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.778892 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.783892 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0195bd92-5ff5-4c4b-86a6-360d9620f118-profile-collector-cert\") pod \"catalog-operator-68c6474976-6qdd5\" (UID: \"0195bd92-5ff5-4c4b-86a6-360d9620f118\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6qdd5" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.783931 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ccf042b9-768a-413d-bc29-58ab74c06fc9-console-config\") pod \"console-f9d7485db-pc67v\" (UID: \"ccf042b9-768a-413d-bc29-58ab74c06fc9\") " pod="openshift-console/console-f9d7485db-pc67v" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.783951 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d32c16ff-cc90-4759-a695-405d76694b39-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-thfrv\" (UID: \"d32c16ff-cc90-4759-a695-405d76694b39\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-thfrv" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.783971 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.783988 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-psnct\" (UniqueName: \"kubernetes.io/projected/ccf042b9-768a-413d-bc29-58ab74c06fc9-kube-api-access-psnct\") pod \"console-f9d7485db-pc67v\" (UID: \"ccf042b9-768a-413d-bc29-58ab74c06fc9\") " pod="openshift-console/console-f9d7485db-pc67v" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784004 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ccf042b9-768a-413d-bc29-58ab74c06fc9-oauth-serving-cert\") pod \"console-f9d7485db-pc67v\" (UID: \"ccf042b9-768a-413d-bc29-58ab74c06fc9\") " 
pod="openshift-console/console-f9d7485db-pc67v" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784019 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e011176f-c96e-4823-89f6-648d574d1ef4-service-ca-bundle\") pod \"router-default-5444994796-pxhmv\" (UID: \"e011176f-c96e-4823-89f6-648d574d1ef4\") " pod="openshift-ingress/router-default-5444994796-pxhmv" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784040 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/88ee50e1-3036-4557-8dbb-6aefcc8df336-service-ca-bundle\") pod \"authentication-operator-69f744f599-htjqx\" (UID: \"88ee50e1-3036-4557-8dbb-6aefcc8df336\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-htjqx" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784057 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qtrnx\" (UniqueName: \"kubernetes.io/projected/e011176f-c96e-4823-89f6-648d574d1ef4-kube-api-access-qtrnx\") pod \"router-default-5444994796-pxhmv\" (UID: \"e011176f-c96e-4823-89f6-648d574d1ef4\") " pod="openshift-ingress/router-default-5444994796-pxhmv" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784079 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/56d26781-8236-4a07-9dbf-d0b926cba29a-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9jbmn\" (UID: \"56d26781-8236-4a07-9dbf-d0b926cba29a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9jbmn" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784093 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/107de3f1-b5a8-41e4-bb3b-a34e4e916390-audit-policies\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784110 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784124 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xkfhz\" (UniqueName: \"kubernetes.io/projected/88ee50e1-3036-4557-8dbb-6aefcc8df336-kube-api-access-xkfhz\") pod \"authentication-operator-69f744f599-htjqx\" (UID: \"88ee50e1-3036-4557-8dbb-6aefcc8df336\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-htjqx" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784145 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ccf042b9-768a-413d-bc29-58ab74c06fc9-trusted-ca-bundle\") pod \"console-f9d7485db-pc67v\" (UID: \"ccf042b9-768a-413d-bc29-58ab74c06fc9\") " pod="openshift-console/console-f9d7485db-pc67v" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784159 4651 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88ee50e1-3036-4557-8dbb-6aefcc8df336-config\") pod \"authentication-operator-69f744f599-htjqx\" (UID: \"88ee50e1-3036-4557-8dbb-6aefcc8df336\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-htjqx" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784177 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/88ee50e1-3036-4557-8dbb-6aefcc8df336-serving-cert\") pod \"authentication-operator-69f744f599-htjqx\" (UID: \"88ee50e1-3036-4557-8dbb-6aefcc8df336\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-htjqx" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784205 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d32c16ff-cc90-4759-a695-405d76694b39-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-thfrv\" (UID: \"d32c16ff-cc90-4759-a695-405d76694b39\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-thfrv" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784220 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/e011176f-c96e-4823-89f6-648d574d1ef4-stats-auth\") pod \"router-default-5444994796-pxhmv\" (UID: \"e011176f-c96e-4823-89f6-648d574d1ef4\") " pod="openshift-ingress/router-default-5444994796-pxhmv" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784238 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784253 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wppnl\" (UniqueName: \"kubernetes.io/projected/107de3f1-b5a8-41e4-bb3b-a34e4e916390-kube-api-access-wppnl\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784270 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4c2p\" (UniqueName: \"kubernetes.io/projected/d32c16ff-cc90-4759-a695-405d76694b39-kube-api-access-d4c2p\") pod \"kube-storage-version-migrator-operator-b67b599dd-thfrv\" (UID: \"d32c16ff-cc90-4759-a695-405d76694b39\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-thfrv" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784286 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6qvd\" (UniqueName: \"kubernetes.io/projected/d05dd104-27f8-410b-8a71-68101c58d906-kube-api-access-t6qvd\") pod \"dns-operator-744455d44c-p8tts\" (UID: \"d05dd104-27f8-410b-8a71-68101c58d906\") " pod="openshift-dns-operator/dns-operator-744455d44c-p8tts" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784302 4651 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784322 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d05dd104-27f8-410b-8a71-68101c58d906-metrics-tls\") pod \"dns-operator-744455d44c-p8tts\" (UID: \"d05dd104-27f8-410b-8a71-68101c58d906\") " pod="openshift-dns-operator/dns-operator-744455d44c-p8tts" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784337 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/107de3f1-b5a8-41e4-bb3b-a34e4e916390-audit-dir\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784352 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784382 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0195bd92-5ff5-4c4b-86a6-360d9620f118-srv-cert\") pod \"catalog-operator-68c6474976-6qdd5\" (UID: \"0195bd92-5ff5-4c4b-86a6-360d9620f118\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6qdd5" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784398 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/56d26781-8236-4a07-9dbf-d0b926cba29a-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9jbmn\" (UID: \"56d26781-8236-4a07-9dbf-d0b926cba29a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9jbmn" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784413 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/29ad03db-a1b8-4cf1-b603-d9e1e61359db-webhook-cert\") pod \"packageserver-d55dfcdfc-hf4jk\" (UID: \"29ad03db-a1b8-4cf1-b603-d9e1e61359db\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hf4jk" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784429 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ccf042b9-768a-413d-bc29-58ab74c06fc9-console-serving-cert\") pod \"console-f9d7485db-pc67v\" (UID: \"ccf042b9-768a-413d-bc29-58ab74c06fc9\") " pod="openshift-console/console-f9d7485db-pc67v" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784442 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ccf042b9-768a-413d-bc29-58ab74c06fc9-service-ca\") pod \"console-f9d7485db-pc67v\" (UID: 
\"ccf042b9-768a-413d-bc29-58ab74c06fc9\") " pod="openshift-console/console-f9d7485db-pc67v" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784457 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/e011176f-c96e-4823-89f6-648d574d1ef4-default-certificate\") pod \"router-default-5444994796-pxhmv\" (UID: \"e011176f-c96e-4823-89f6-648d574d1ef4\") " pod="openshift-ingress/router-default-5444994796-pxhmv" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784473 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784489 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784506 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56d26781-8236-4a07-9dbf-d0b926cba29a-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9jbmn\" (UID: \"56d26781-8236-4a07-9dbf-d0b926cba29a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9jbmn" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784528 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/88ee50e1-3036-4557-8dbb-6aefcc8df336-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-htjqx\" (UID: \"88ee50e1-3036-4557-8dbb-6aefcc8df336\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-htjqx" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784544 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784565 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ccf042b9-768a-413d-bc29-58ab74c06fc9-console-oauth-config\") pod \"console-f9d7485db-pc67v\" (UID: \"ccf042b9-768a-413d-bc29-58ab74c06fc9\") " pod="openshift-console/console-f9d7485db-pc67v" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784579 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" 
Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784595 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784616 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mpkq7\" (UniqueName: \"kubernetes.io/projected/0195bd92-5ff5-4c4b-86a6-360d9620f118-kube-api-access-mpkq7\") pod \"catalog-operator-68c6474976-6qdd5\" (UID: \"0195bd92-5ff5-4c4b-86a6-360d9620f118\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6qdd5" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784631 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784650 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/29ad03db-a1b8-4cf1-b603-d9e1e61359db-tmpfs\") pod \"packageserver-d55dfcdfc-hf4jk\" (UID: \"29ad03db-a1b8-4cf1-b603-d9e1e61359db\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hf4jk" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784668 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qw8z2\" (UniqueName: \"kubernetes.io/projected/29ad03db-a1b8-4cf1-b603-d9e1e61359db-kube-api-access-qw8z2\") pod \"packageserver-d55dfcdfc-hf4jk\" (UID: \"29ad03db-a1b8-4cf1-b603-d9e1e61359db\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hf4jk" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784684 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e011176f-c96e-4823-89f6-648d574d1ef4-metrics-certs\") pod \"router-default-5444994796-pxhmv\" (UID: \"e011176f-c96e-4823-89f6-648d574d1ef4\") " pod="openshift-ingress/router-default-5444994796-pxhmv" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784700 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/29ad03db-a1b8-4cf1-b603-d9e1e61359db-apiservice-cert\") pod \"packageserver-d55dfcdfc-hf4jk\" (UID: \"29ad03db-a1b8-4cf1-b603-d9e1e61359db\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hf4jk" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.784851 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/107de3f1-b5a8-41e4-bb3b-a34e4e916390-audit-dir\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.785881 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: 
\"kubernetes.io/configmap/ccf042b9-768a-413d-bc29-58ab74c06fc9-service-ca\") pod \"console-f9d7485db-pc67v\" (UID: \"ccf042b9-768a-413d-bc29-58ab74c06fc9\") " pod="openshift-console/console-f9d7485db-pc67v" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.786126 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/88ee50e1-3036-4557-8dbb-6aefcc8df336-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-htjqx\" (UID: \"88ee50e1-3036-4557-8dbb-6aefcc8df336\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-htjqx" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.786847 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.787276 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.787507 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ccf042b9-768a-413d-bc29-58ab74c06fc9-console-config\") pod \"console-f9d7485db-pc67v\" (UID: \"ccf042b9-768a-413d-bc29-58ab74c06fc9\") " pod="openshift-console/console-f9d7485db-pc67v" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.787587 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d32c16ff-cc90-4759-a695-405d76694b39-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-thfrv\" (UID: \"d32c16ff-cc90-4759-a695-405d76694b39\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-thfrv" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.788047 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.788199 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/88ee50e1-3036-4557-8dbb-6aefcc8df336-serving-cert\") pod \"authentication-operator-69f744f599-htjqx\" (UID: \"88ee50e1-3036-4557-8dbb-6aefcc8df336\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-htjqx" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.788790 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: 
\"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.789036 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ccf042b9-768a-413d-bc29-58ab74c06fc9-oauth-serving-cert\") pod \"console-f9d7485db-pc67v\" (UID: \"ccf042b9-768a-413d-bc29-58ab74c06fc9\") " pod="openshift-console/console-f9d7485db-pc67v" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.789594 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/88ee50e1-3036-4557-8dbb-6aefcc8df336-service-ca-bundle\") pod \"authentication-operator-69f744f599-htjqx\" (UID: \"88ee50e1-3036-4557-8dbb-6aefcc8df336\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-htjqx" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.789696 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/29ad03db-a1b8-4cf1-b603-d9e1e61359db-tmpfs\") pod \"packageserver-d55dfcdfc-hf4jk\" (UID: \"29ad03db-a1b8-4cf1-b603-d9e1e61359db\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hf4jk" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.789857 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ccf042b9-768a-413d-bc29-58ab74c06fc9-console-oauth-config\") pod \"console-f9d7485db-pc67v\" (UID: \"ccf042b9-768a-413d-bc29-58ab74c06fc9\") " pod="openshift-console/console-f9d7485db-pc67v" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.790118 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.790311 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/107de3f1-b5a8-41e4-bb3b-a34e4e916390-audit-policies\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.790367 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.790752 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.790947 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d32c16ff-cc90-4759-a695-405d76694b39-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-thfrv\" (UID: \"d32c16ff-cc90-4759-a695-405d76694b39\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-thfrv" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.790951 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ccf042b9-768a-413d-bc29-58ab74c06fc9-trusted-ca-bundle\") pod \"console-f9d7485db-pc67v\" (UID: \"ccf042b9-768a-413d-bc29-58ab74c06fc9\") " pod="openshift-console/console-f9d7485db-pc67v" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.791259 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88ee50e1-3036-4557-8dbb-6aefcc8df336-config\") pod \"authentication-operator-69f744f599-htjqx\" (UID: \"88ee50e1-3036-4557-8dbb-6aefcc8df336\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-htjqx" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.791504 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.792421 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.792715 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ccf042b9-768a-413d-bc29-58ab74c06fc9-console-serving-cert\") pod \"console-f9d7485db-pc67v\" (UID: \"ccf042b9-768a-413d-bc29-58ab74c06fc9\") " pod="openshift-console/console-f9d7485db-pc67v" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.793430 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.793475 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.816268 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.835698 4651 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.855285 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.867633 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d05dd104-27f8-410b-8a71-68101c58d906-metrics-tls\") pod \"dns-operator-744455d44c-p8tts\" (UID: \"d05dd104-27f8-410b-8a71-68101c58d906\") " pod="openshift-dns-operator/dns-operator-744455d44c-p8tts" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.876289 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.895602 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.916403 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.935643 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.938485 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56d26781-8236-4a07-9dbf-d0b926cba29a-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9jbmn\" (UID: \"56d26781-8236-4a07-9dbf-d0b926cba29a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9jbmn" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.956934 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.976037 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.980006 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/56d26781-8236-4a07-9dbf-d0b926cba29a-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9jbmn\" (UID: \"56d26781-8236-4a07-9dbf-d0b926cba29a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9jbmn" Oct 11 04:53:38 crc kubenswrapper[4651]: I1011 04:53:38.996495 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.015784 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.036737 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.056253 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.076191 4651 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.096767 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.117243 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.135635 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.165335 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.177584 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.196673 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.216283 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.219542 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/e011176f-c96e-4823-89f6-648d574d1ef4-default-certificate\") pod \"router-default-5444994796-pxhmv\" (UID: \"e011176f-c96e-4823-89f6-648d574d1ef4\") " pod="openshift-ingress/router-default-5444994796-pxhmv" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.236467 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.244700 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/e011176f-c96e-4823-89f6-648d574d1ef4-stats-auth\") pod \"router-default-5444994796-pxhmv\" (UID: \"e011176f-c96e-4823-89f6-648d574d1ef4\") " pod="openshift-ingress/router-default-5444994796-pxhmv" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.256169 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.265061 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e011176f-c96e-4823-89f6-648d574d1ef4-metrics-certs\") pod \"router-default-5444994796-pxhmv\" (UID: \"e011176f-c96e-4823-89f6-648d574d1ef4\") " pod="openshift-ingress/router-default-5444994796-pxhmv" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.277149 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.297412 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.300611 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e011176f-c96e-4823-89f6-648d574d1ef4-service-ca-bundle\") pod 
\"router-default-5444994796-pxhmv\" (UID: \"e011176f-c96e-4823-89f6-648d574d1ef4\") " pod="openshift-ingress/router-default-5444994796-pxhmv" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.317553 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.336436 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.356605 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.370170 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/29ad03db-a1b8-4cf1-b603-d9e1e61359db-apiservice-cert\") pod \"packageserver-d55dfcdfc-hf4jk\" (UID: \"29ad03db-a1b8-4cf1-b603-d9e1e61359db\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hf4jk" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.370228 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/29ad03db-a1b8-4cf1-b603-d9e1e61359db-webhook-cert\") pod \"packageserver-d55dfcdfc-hf4jk\" (UID: \"29ad03db-a1b8-4cf1-b603-d9e1e61359db\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hf4jk" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.376938 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.389271 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0195bd92-5ff5-4c4b-86a6-360d9620f118-srv-cert\") pod \"catalog-operator-68c6474976-6qdd5\" (UID: \"0195bd92-5ff5-4c4b-86a6-360d9620f118\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6qdd5" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.397164 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.416367 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.427724 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0195bd92-5ff5-4c4b-86a6-360d9620f118-profile-collector-cert\") pod \"catalog-operator-68c6474976-6qdd5\" (UID: \"0195bd92-5ff5-4c4b-86a6-360d9620f118\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6qdd5" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.437280 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.478491 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.496990 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Oct 11 04:53:39 crc 
kubenswrapper[4651]: I1011 04:53:39.527273 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.536813 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.555905 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.576472 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.597173 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.617507 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.634520 4651 request.go:700] Waited for 1.008652065s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/secrets?fieldSelector=metadata.name%3Dmco-proxy-tls&limit=500&resourceVersion=0 Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.637649 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.657872 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.677400 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.698949 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.718326 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.737741 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.758023 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.776811 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.797401 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.816956 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.836524 4651 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.857588 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.877903 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.896566 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.917493 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.936500 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.957391 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.977324 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Oct 11 04:53:39 crc kubenswrapper[4651]: I1011 04:53:39.996192 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.017878 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.036953 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.056908 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.076203 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.096019 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.117653 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.136308 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.156129 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.175860 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.196783 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 
04:53:40.217571 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.236906 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.257132 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.277044 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.297730 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.334073 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfr6b\" (UniqueName: \"kubernetes.io/projected/67e0d9bc-63c7-4509-b804-d63705caa189-kube-api-access-tfr6b\") pod \"openshift-controller-manager-operator-756b6f6bc6-2jmxt\" (UID: \"67e0d9bc-63c7-4509-b804-d63705caa189\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2jmxt" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.357686 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nbpll\" (UniqueName: \"kubernetes.io/projected/f90412cd-2f6a-4322-b3b7-29904de3b09c-kube-api-access-nbpll\") pod \"apiserver-7bbb656c7d-xzfcs\" (UID: \"f90412cd-2f6a-4322-b3b7-29904de3b09c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.376017 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g9k8p\" (UniqueName: \"kubernetes.io/projected/5c2e6635-02f1-4869-9d20-7577116611ba-kube-api-access-g9k8p\") pod \"downloads-7954f5f757-s9qnd\" (UID: \"5c2e6635-02f1-4869-9d20-7577116611ba\") " pod="openshift-console/downloads-7954f5f757-s9qnd" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.392564 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9d9m\" (UniqueName: \"kubernetes.io/projected/5fb1ac4f-b0d1-4314-ac06-d887654fa3f5-kube-api-access-q9d9m\") pod \"machine-approver-56656f9798-2f67z\" (UID: \"5fb1ac4f-b0d1-4314-ac06-d887654fa3f5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2f67z" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.397291 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2f67z" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.412113 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5r9g\" (UniqueName: \"kubernetes.io/projected/2797c45d-e1d7-44d7-b936-44048593f540-kube-api-access-p5r9g\") pod \"controller-manager-879f6c89f-2hhkn\" (UID: \"2797c45d-e1d7-44d7-b936-44048593f540\") " pod="openshift-controller-manager/controller-manager-879f6c89f-2hhkn" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.415019 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.441802 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7d852\" (UniqueName: \"kubernetes.io/projected/7bafdf97-6219-440a-a3b0-49c55c2a3b5b-kube-api-access-7d852\") pod \"openshift-apiserver-operator-796bbdcf4f-ws6wr\" (UID: \"7bafdf97-6219-440a-a3b0-49c55c2a3b5b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ws6wr" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.449081 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ql7bg\" (UniqueName: \"kubernetes.io/projected/49755973-5d14-4c72-9858-7edca1f2c2ee-kube-api-access-ql7bg\") pod \"apiserver-76f77b778f-n4hfz\" (UID: \"49755973-5d14-4c72-9858-7edca1f2c2ee\") " pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.473624 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fkp6g\" (UniqueName: \"kubernetes.io/projected/aa722226-d2d2-4122-93fa-1aeee25b7868-kube-api-access-fkp6g\") pod \"console-operator-58897d9998-mdhlw\" (UID: \"aa722226-d2d2-4122-93fa-1aeee25b7868\") " pod="openshift-console-operator/console-operator-58897d9998-mdhlw" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.494642 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46v55\" (UniqueName: \"kubernetes.io/projected/b590e4d5-1684-4e2f-b5e9-8fbf00db4546-kube-api-access-46v55\") pod \"machine-api-operator-5694c8668f-2mj56\" (UID: \"b590e4d5-1684-4e2f-b5e9-8fbf00db4546\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2mj56" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.510714 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-445ts\" (UniqueName: \"kubernetes.io/projected/391e51dc-8070-40bf-ac61-33c1ef37c72b-kube-api-access-445ts\") pod \"cluster-samples-operator-665b6dd947-rgxlz\" (UID: \"391e51dc-8070-40bf-ac61-33c1ef37c72b\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rgxlz" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.536185 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.538593 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jnx4\" (UniqueName: \"kubernetes.io/projected/27505683-e595-4855-8a29-aceee78542b6-kube-api-access-8jnx4\") pod \"route-controller-manager-6576b87f9c-pkhrx\" (UID: \"27505683-e595-4855-8a29-aceee78542b6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pkhrx" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.552772 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pkhrx" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.558763 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.576853 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.589864 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ws6wr" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.595791 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.600278 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2f67z" event={"ID":"5fb1ac4f-b0d1-4314-ac06-d887654fa3f5","Type":"ContainerStarted","Data":"c4a08eb38608422c3790b0b463f5af639fe5bf464c8ca099a7b511264a0463e7"} Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.613329 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2jmxt" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.618919 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.633261 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-2hhkn" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.634748 4651 request.go:700] Waited for 1.940672173s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/secrets?fieldSelector=metadata.name%3Dnode-bootstrapper-token&limit=500&resourceVersion=0 Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.636594 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.645369 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-2mj56" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.655918 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.661588 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs"] Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.670430 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-s9qnd" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.677775 4651 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.716745 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.718355 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.724704 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rgxlz" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.734353 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mpkq7\" (UniqueName: \"kubernetes.io/projected/0195bd92-5ff5-4c4b-86a6-360d9620f118-kube-api-access-mpkq7\") pod \"catalog-operator-68c6474976-6qdd5\" (UID: \"0195bd92-5ff5-4c4b-86a6-360d9620f118\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6qdd5" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.740221 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-mdhlw" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.753433 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d4c2p\" (UniqueName: \"kubernetes.io/projected/d32c16ff-cc90-4759-a695-405d76694b39-kube-api-access-d4c2p\") pod \"kube-storage-version-migrator-operator-b67b599dd-thfrv\" (UID: \"d32c16ff-cc90-4759-a695-405d76694b39\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-thfrv" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.776882 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wppnl\" (UniqueName: \"kubernetes.io/projected/107de3f1-b5a8-41e4-bb3b-a34e4e916390-kube-api-access-wppnl\") pod \"oauth-openshift-558db77b4-zqjmp\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") " pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.779497 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-pkhrx"] Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.799799 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-psnct\" (UniqueName: \"kubernetes.io/projected/ccf042b9-768a-413d-bc29-58ab74c06fc9-kube-api-access-psnct\") pod \"console-f9d7485db-pc67v\" (UID: \"ccf042b9-768a-413d-bc29-58ab74c06fc9\") " pod="openshift-console/console-f9d7485db-pc67v" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.810337 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qw8z2\" (UniqueName: \"kubernetes.io/projected/29ad03db-a1b8-4cf1-b603-d9e1e61359db-kube-api-access-qw8z2\") pod \"packageserver-d55dfcdfc-hf4jk\" (UID: \"29ad03db-a1b8-4cf1-b603-d9e1e61359db\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hf4jk" Oct 11 04:53:40 crc kubenswrapper[4651]: W1011 04:53:40.824066 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod27505683_e595_4855_8a29_aceee78542b6.slice/crio-ad8f515568c1c63c51315eb3691dcc49eb9d576df74d6d7498aa8f5710f827e1 WatchSource:0}: Error finding container ad8f515568c1c63c51315eb3691dcc49eb9d576df74d6d7498aa8f5710f827e1: Status 404 returned error can't find the container with id ad8f515568c1c63c51315eb3691dcc49eb9d576df74d6d7498aa8f5710f827e1 Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.828696 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-thfrv" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.831444 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qtrnx\" (UniqueName: \"kubernetes.io/projected/e011176f-c96e-4823-89f6-648d574d1ef4-kube-api-access-qtrnx\") pod \"router-default-5444994796-pxhmv\" (UID: \"e011176f-c96e-4823-89f6-648d574d1ef4\") " pod="openshift-ingress/router-default-5444994796-pxhmv" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.838550 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.854814 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/56d26781-8236-4a07-9dbf-d0b926cba29a-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9jbmn\" (UID: \"56d26781-8236-4a07-9dbf-d0b926cba29a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9jbmn" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.873461 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xkfhz\" (UniqueName: \"kubernetes.io/projected/88ee50e1-3036-4557-8dbb-6aefcc8df336-kube-api-access-xkfhz\") pod \"authentication-operator-69f744f599-htjqx\" (UID: \"88ee50e1-3036-4557-8dbb-6aefcc8df336\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-htjqx" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.885159 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9jbmn" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.889886 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6qvd\" (UniqueName: \"kubernetes.io/projected/d05dd104-27f8-410b-8a71-68101c58d906-kube-api-access-t6qvd\") pod \"dns-operator-744455d44c-p8tts\" (UID: \"d05dd104-27f8-410b-8a71-68101c58d906\") " pod="openshift-dns-operator/dns-operator-744455d44c-p8tts" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.923588 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4adc8e3f-786a-4d1b-985b-3f39cf67767a-trusted-ca\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.923621 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4adc8e3f-786a-4d1b-985b-3f39cf67767a-registry-tls\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.923643 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/82548e7e-b445-4506-b42c-c9c620e82267-proxy-tls\") pod \"machine-config-controller-84d6567774-zbqhj\" (UID: \"82548e7e-b445-4506-b42c-c9c620e82267\") " 
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zbqhj" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.923667 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1f6c3e28-3c8b-41d2-a314-95103ddb7ab4-trusted-ca\") pod \"ingress-operator-5b745b69d9-xqg79\" (UID: \"1f6c3e28-3c8b-41d2-a314-95103ddb7ab4\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xqg79" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.923686 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4adc8e3f-786a-4d1b-985b-3f39cf67767a-bound-sa-token\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.923700 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wm6m8\" (UniqueName: \"kubernetes.io/projected/5ad2068c-8217-424f-aa22-dfb57604ea05-kube-api-access-wm6m8\") pod \"multus-admission-controller-857f4d67dd-2lxzd\" (UID: \"5ad2068c-8217-424f-aa22-dfb57604ea05\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-2lxzd" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.923718 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8r9t\" (UniqueName: \"kubernetes.io/projected/6cd78244-1fbc-4c7b-81ee-be4a2e1eda22-kube-api-access-t8r9t\") pod \"migrator-59844c95c7-7tk5h\" (UID: \"6cd78244-1fbc-4c7b-81ee-be4a2e1eda22\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7tk5h" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.923735 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tz2c4\" (UniqueName: \"kubernetes.io/projected/1f6c3e28-3c8b-41d2-a314-95103ddb7ab4-kube-api-access-tz2c4\") pod \"ingress-operator-5b745b69d9-xqg79\" (UID: \"1f6c3e28-3c8b-41d2-a314-95103ddb7ab4\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xqg79" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.923766 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jldzf\" (UniqueName: \"kubernetes.io/projected/fc942eea-0c2a-474f-bc24-c6d5fb171c79-kube-api-access-jldzf\") pod \"openshift-config-operator-7777fb866f-h9rlq\" (UID: \"fc942eea-0c2a-474f-bc24-c6d5fb171c79\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-h9rlq" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.923805 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4adc8e3f-786a-4d1b-985b-3f39cf67767a-ca-trust-extracted\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.923842 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/1f6c3e28-3c8b-41d2-a314-95103ddb7ab4-metrics-tls\") pod \"ingress-operator-5b745b69d9-xqg79\" (UID: \"1f6c3e28-3c8b-41d2-a314-95103ddb7ab4\") 
" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xqg79" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.923911 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fc942eea-0c2a-474f-bc24-c6d5fb171c79-serving-cert\") pod \"openshift-config-operator-7777fb866f-h9rlq\" (UID: \"fc942eea-0c2a-474f-bc24-c6d5fb171c79\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-h9rlq" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.923929 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44nls\" (UniqueName: \"kubernetes.io/projected/82548e7e-b445-4506-b42c-c9c620e82267-kube-api-access-44nls\") pod \"machine-config-controller-84d6567774-zbqhj\" (UID: \"82548e7e-b445-4506-b42c-c9c620e82267\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zbqhj" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.923947 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/82548e7e-b445-4506-b42c-c9c620e82267-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-zbqhj\" (UID: \"82548e7e-b445-4506-b42c-c9c620e82267\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zbqhj" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.924100 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/19358ccd-5ef0-4416-81db-347114a9bbe4-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-4lsqd\" (UID: \"19358ccd-5ef0-4416-81db-347114a9bbe4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4lsqd" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.924141 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/5ad2068c-8217-424f-aa22-dfb57604ea05-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-2lxzd\" (UID: \"5ad2068c-8217-424f-aa22-dfb57604ea05\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-2lxzd" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.924162 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/19358ccd-5ef0-4416-81db-347114a9bbe4-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-4lsqd\" (UID: \"19358ccd-5ef0-4416-81db-347114a9bbe4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4lsqd" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.924238 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssbtk\" (UniqueName: \"kubernetes.io/projected/19358ccd-5ef0-4416-81db-347114a9bbe4-kube-api-access-ssbtk\") pod \"cluster-image-registry-operator-dc59b4c8b-4lsqd\" (UID: \"19358ccd-5ef0-4416-81db-347114a9bbe4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4lsqd" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.924263 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87w7k\" (UniqueName: 
\"kubernetes.io/projected/f8fd1293-3d68-4dd2-bc12-8f7c02017bcd-kube-api-access-87w7k\") pod \"control-plane-machine-set-operator-78cbb6b69f-27wgh\" (UID: \"f8fd1293-3d68-4dd2-bc12-8f7c02017bcd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-27wgh" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.924302 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.924325 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4adc8e3f-786a-4d1b-985b-3f39cf67767a-installation-pull-secrets\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.924350 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmksb\" (UniqueName: \"kubernetes.io/projected/4adc8e3f-786a-4d1b-985b-3f39cf67767a-kube-api-access-fmksb\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.924406 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/fc942eea-0c2a-474f-bc24-c6d5fb171c79-available-featuregates\") pod \"openshift-config-operator-7777fb866f-h9rlq\" (UID: \"fc942eea-0c2a-474f-bc24-c6d5fb171c79\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-h9rlq" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.924432 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/f8fd1293-3d68-4dd2-bc12-8f7c02017bcd-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-27wgh\" (UID: \"f8fd1293-3d68-4dd2-bc12-8f7c02017bcd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-27wgh" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.924477 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4adc8e3f-786a-4d1b-985b-3f39cf67767a-registry-certificates\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.924586 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1f6c3e28-3c8b-41d2-a314-95103ddb7ab4-bound-sa-token\") pod \"ingress-operator-5b745b69d9-xqg79\" (UID: \"1f6c3e28-3c8b-41d2-a314-95103ddb7ab4\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xqg79" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.924613 4651 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/19358ccd-5ef0-4416-81db-347114a9bbe4-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-4lsqd\" (UID: \"19358ccd-5ef0-4416-81db-347114a9bbe4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4lsqd" Oct 11 04:53:40 crc kubenswrapper[4651]: E1011 04:53:40.924753 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:41.424737877 +0000 UTC m=+142.320970753 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.933081 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-pxhmv" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.939688 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6qdd5" Oct 11 04:53:40 crc kubenswrapper[4651]: I1011 04:53:40.948499 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hf4jk" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.025347 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.025846 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssbtk\" (UniqueName: \"kubernetes.io/projected/19358ccd-5ef0-4416-81db-347114a9bbe4-kube-api-access-ssbtk\") pod \"cluster-image-registry-operator-dc59b4c8b-4lsqd\" (UID: \"19358ccd-5ef0-4416-81db-347114a9bbe4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4lsqd" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.026252 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87w7k\" (UniqueName: \"kubernetes.io/projected/f8fd1293-3d68-4dd2-bc12-8f7c02017bcd-kube-api-access-87w7k\") pod \"control-plane-machine-set-operator-78cbb6b69f-27wgh\" (UID: \"f8fd1293-3d68-4dd2-bc12-8f7c02017bcd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-27wgh" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.026355 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/3bc42b99-e1e0-4c88-90fd-8ba933095287-etcd-service-ca\") pod \"etcd-operator-b45778765-97x45\" (UID: \"3bc42b99-e1e0-4c88-90fd-8ba933095287\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-97x45" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.026376 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c67w2\" (UniqueName: \"kubernetes.io/projected/f6aa26e9-64fd-4591-9824-bb8b4ddfce0f-kube-api-access-c67w2\") pod \"service-ca-operator-777779d784-2mj6l\" (UID: \"f6aa26e9-64fd-4591-9824-bb8b4ddfce0f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2mj6l" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.026561 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4adc8e3f-786a-4d1b-985b-3f39cf67767a-installation-pull-secrets\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.026578 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3bc42b99-e1e0-4c88-90fd-8ba933095287-etcd-client\") pod \"etcd-operator-b45778765-97x45\" (UID: \"3bc42b99-e1e0-4c88-90fd-8ba933095287\") " pod="openshift-etcd-operator/etcd-operator-b45778765-97x45" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.026633 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/6cfb505f-a515-433b-82ac-792d3b435ce1-srv-cert\") pod \"olm-operator-6b444d44fb-97fqd\" (UID: \"6cfb505f-a515-433b-82ac-792d3b435ce1\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-97fqd" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.026653 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmksb\" (UniqueName: \"kubernetes.io/projected/4adc8e3f-786a-4d1b-985b-3f39cf67767a-kube-api-access-fmksb\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.026714 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5b16fb63-ca55-4c40-8d92-21477dd79984-auth-proxy-config\") pod \"machine-config-operator-74547568cd-r4vsk\" (UID: \"5b16fb63-ca55-4c40-8d92-21477dd79984\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-r4vsk" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.026729 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6aa26e9-64fd-4591-9824-bb8b4ddfce0f-config\") pod \"service-ca-operator-777779d784-2mj6l\" (UID: \"f6aa26e9-64fd-4591-9824-bb8b4ddfce0f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2mj6l" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.027727 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/fc942eea-0c2a-474f-bc24-c6d5fb171c79-available-featuregates\") pod \"openshift-config-operator-7777fb866f-h9rlq\" (UID: \"fc942eea-0c2a-474f-bc24-c6d5fb171c79\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-h9rlq" Oct 11 
04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.027764 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/f8fd1293-3d68-4dd2-bc12-8f7c02017bcd-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-27wgh\" (UID: \"f8fd1293-3d68-4dd2-bc12-8f7c02017bcd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-27wgh" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.027804 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92r8x\" (UniqueName: \"kubernetes.io/projected/6cfb505f-a515-433b-82ac-792d3b435ce1-kube-api-access-92r8x\") pod \"olm-operator-6b444d44fb-97fqd\" (UID: \"6cfb505f-a515-433b-82ac-792d3b435ce1\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-97fqd" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.028219 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/fc942eea-0c2a-474f-bc24-c6d5fb171c79-available-featuregates\") pod \"openshift-config-operator-7777fb866f-h9rlq\" (UID: \"fc942eea-0c2a-474f-bc24-c6d5fb171c79\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-h9rlq" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.028495 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4adc8e3f-786a-4d1b-985b-3f39cf67767a-registry-certificates\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.028524 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1f6c3e28-3c8b-41d2-a314-95103ddb7ab4-bound-sa-token\") pod \"ingress-operator-5b745b69d9-xqg79\" (UID: \"1f6c3e28-3c8b-41d2-a314-95103ddb7ab4\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xqg79" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.029569 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4adc8e3f-786a-4d1b-985b-3f39cf67767a-registry-certificates\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.029719 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/19358ccd-5ef0-4416-81db-347114a9bbe4-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-4lsqd\" (UID: \"19358ccd-5ef0-4416-81db-347114a9bbe4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4lsqd" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.029798 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4adc8e3f-786a-4d1b-985b-3f39cf67767a-trusted-ca\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 
04:53:41.030901 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4adc8e3f-786a-4d1b-985b-3f39cf67767a-registry-tls\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:41 crc kubenswrapper[4651]: E1011 04:53:41.038174 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:41.538148235 +0000 UTC m=+142.434381031 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.044130 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/12479473-09c0-4d87-9075-0b37754123a6-csi-data-dir\") pod \"csi-hostpathplugin-8fhsm\" (UID: \"12479473-09c0-4d87-9075-0b37754123a6\") " pod="hostpath-provisioner/csi-hostpathplugin-8fhsm" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.044717 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/82548e7e-b445-4506-b42c-c9c620e82267-proxy-tls\") pod \"machine-config-controller-84d6567774-zbqhj\" (UID: \"82548e7e-b445-4506-b42c-c9c620e82267\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zbqhj" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.044751 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3bc42b99-e1e0-4c88-90fd-8ba933095287-config\") pod \"etcd-operator-b45778765-97x45\" (UID: \"3bc42b99-e1e0-4c88-90fd-8ba933095287\") " pod="openshift-etcd-operator/etcd-operator-b45778765-97x45" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.073385 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4adc8e3f-786a-4d1b-985b-3f39cf67767a-registry-tls\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.077660 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/f8fd1293-3d68-4dd2-bc12-8f7c02017bcd-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-27wgh\" (UID: \"f8fd1293-3d68-4dd2-bc12-8f7c02017bcd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-27wgh" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.080112 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/4adc8e3f-786a-4d1b-985b-3f39cf67767a-trusted-ca\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.080641 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/19358ccd-5ef0-4416-81db-347114a9bbe4-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-4lsqd\" (UID: \"19358ccd-5ef0-4416-81db-347114a9bbe4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4lsqd" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.082628 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-htjqx" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.083365 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-pc67v" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.083968 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1f6c3e28-3c8b-41d2-a314-95103ddb7ab4-trusted-ca\") pod \"ingress-operator-5b745b69d9-xqg79\" (UID: \"1f6c3e28-3c8b-41d2-a314-95103ddb7ab4\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xqg79" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.084006 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v88sq\" (UniqueName: \"kubernetes.io/projected/12479473-09c0-4d87-9075-0b37754123a6-kube-api-access-v88sq\") pod \"csi-hostpathplugin-8fhsm\" (UID: \"12479473-09c0-4d87-9075-0b37754123a6\") " pod="hostpath-provisioner/csi-hostpathplugin-8fhsm" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.084142 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4adc8e3f-786a-4d1b-985b-3f39cf67767a-bound-sa-token\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.084179 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wm6m8\" (UniqueName: \"kubernetes.io/projected/5ad2068c-8217-424f-aa22-dfb57604ea05-kube-api-access-wm6m8\") pod \"multus-admission-controller-857f4d67dd-2lxzd\" (UID: \"5ad2068c-8217-424f-aa22-dfb57604ea05\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-2lxzd" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.084366 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8r9t\" (UniqueName: \"kubernetes.io/projected/6cd78244-1fbc-4c7b-81ee-be4a2e1eda22-kube-api-access-t8r9t\") pod \"migrator-59844c95c7-7tk5h\" (UID: \"6cd78244-1fbc-4c7b-81ee-be4a2e1eda22\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7tk5h" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.084404 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3bc42b99-e1e0-4c88-90fd-8ba933095287-serving-cert\") pod \"etcd-operator-b45778765-97x45\" (UID: 
\"3bc42b99-e1e0-4c88-90fd-8ba933095287\") " pod="openshift-etcd-operator/etcd-operator-b45778765-97x45" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.084455 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/5b16fb63-ca55-4c40-8d92-21477dd79984-images\") pod \"machine-config-operator-74547568cd-r4vsk\" (UID: \"5b16fb63-ca55-4c40-8d92-21477dd79984\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-r4vsk" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.084512 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tz2c4\" (UniqueName: \"kubernetes.io/projected/1f6c3e28-3c8b-41d2-a314-95103ddb7ab4-kube-api-access-tz2c4\") pod \"ingress-operator-5b745b69d9-xqg79\" (UID: \"1f6c3e28-3c8b-41d2-a314-95103ddb7ab4\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xqg79" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.084647 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5b16fb63-ca55-4c40-8d92-21477dd79984-proxy-tls\") pod \"machine-config-operator-74547568cd-r4vsk\" (UID: \"5b16fb63-ca55-4c40-8d92-21477dd79984\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-r4vsk" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.084742 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/12479473-09c0-4d87-9075-0b37754123a6-registration-dir\") pod \"csi-hostpathplugin-8fhsm\" (UID: \"12479473-09c0-4d87-9075-0b37754123a6\") " pod="hostpath-provisioner/csi-hostpathplugin-8fhsm" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.089508 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/12479473-09c0-4d87-9075-0b37754123a6-mountpoint-dir\") pod \"csi-hostpathplugin-8fhsm\" (UID: \"12479473-09c0-4d87-9075-0b37754123a6\") " pod="hostpath-provisioner/csi-hostpathplugin-8fhsm" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.090726 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1f6c3e28-3c8b-41d2-a314-95103ddb7ab4-trusted-ca\") pod \"ingress-operator-5b745b69d9-xqg79\" (UID: \"1f6c3e28-3c8b-41d2-a314-95103ddb7ab4\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xqg79" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.091109 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jldzf\" (UniqueName: \"kubernetes.io/projected/fc942eea-0c2a-474f-bc24-c6d5fb171c79-kube-api-access-jldzf\") pod \"openshift-config-operator-7777fb866f-h9rlq\" (UID: \"fc942eea-0c2a-474f-bc24-c6d5fb171c79\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-h9rlq" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.093376 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2b64\" (UniqueName: \"kubernetes.io/projected/3bc42b99-e1e0-4c88-90fd-8ba933095287-kube-api-access-j2b64\") pod \"etcd-operator-b45778765-97x45\" (UID: \"3bc42b99-e1e0-4c88-90fd-8ba933095287\") " pod="openshift-etcd-operator/etcd-operator-b45778765-97x45" Oct 11 04:53:41 crc 
kubenswrapper[4651]: I1011 04:53:41.093950 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rtt78\" (UniqueName: \"kubernetes.io/projected/5b16fb63-ca55-4c40-8d92-21477dd79984-kube-api-access-rtt78\") pod \"machine-config-operator-74547568cd-r4vsk\" (UID: \"5b16fb63-ca55-4c40-8d92-21477dd79984\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-r4vsk" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.093986 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4adc8e3f-786a-4d1b-985b-3f39cf67767a-ca-trust-extracted\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.094292 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4adc8e3f-786a-4d1b-985b-3f39cf67767a-ca-trust-extracted\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.094659 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/1f6c3e28-3c8b-41d2-a314-95103ddb7ab4-metrics-tls\") pod \"ingress-operator-5b745b69d9-xqg79\" (UID: \"1f6c3e28-3c8b-41d2-a314-95103ddb7ab4\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xqg79" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.094709 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f6aa26e9-64fd-4591-9824-bb8b4ddfce0f-serving-cert\") pod \"service-ca-operator-777779d784-2mj6l\" (UID: \"f6aa26e9-64fd-4591-9824-bb8b4ddfce0f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2mj6l" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.094865 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ws6wr"] Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.095551 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fc942eea-0c2a-474f-bc24-c6d5fb171c79-serving-cert\") pod \"openshift-config-operator-7777fb866f-h9rlq\" (UID: \"fc942eea-0c2a-474f-bc24-c6d5fb171c79\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-h9rlq" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.095885 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44nls\" (UniqueName: \"kubernetes.io/projected/82548e7e-b445-4506-b42c-c9c620e82267-kube-api-access-44nls\") pod \"machine-config-controller-84d6567774-zbqhj\" (UID: \"82548e7e-b445-4506-b42c-c9c620e82267\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zbqhj" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.096290 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/82548e7e-b445-4506-b42c-c9c620e82267-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-zbqhj\" (UID: 
\"82548e7e-b445-4506-b42c-c9c620e82267\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zbqhj" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.096377 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/3bc42b99-e1e0-4c88-90fd-8ba933095287-etcd-ca\") pod \"etcd-operator-b45778765-97x45\" (UID: \"3bc42b99-e1e0-4c88-90fd-8ba933095287\") " pod="openshift-etcd-operator/etcd-operator-b45778765-97x45" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.097140 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/82548e7e-b445-4506-b42c-c9c620e82267-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-zbqhj\" (UID: \"82548e7e-b445-4506-b42c-c9c620e82267\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zbqhj" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.104088 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fc942eea-0c2a-474f-bc24-c6d5fb171c79-serving-cert\") pod \"openshift-config-operator-7777fb866f-h9rlq\" (UID: \"fc942eea-0c2a-474f-bc24-c6d5fb171c79\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-h9rlq" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.105790 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/12479473-09c0-4d87-9075-0b37754123a6-socket-dir\") pod \"csi-hostpathplugin-8fhsm\" (UID: \"12479473-09c0-4d87-9075-0b37754123a6\") " pod="hostpath-provisioner/csi-hostpathplugin-8fhsm" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.106150 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/6cfb505f-a515-433b-82ac-792d3b435ce1-profile-collector-cert\") pod \"olm-operator-6b444d44fb-97fqd\" (UID: \"6cfb505f-a515-433b-82ac-792d3b435ce1\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-97fqd" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.106229 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/19358ccd-5ef0-4416-81db-347114a9bbe4-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-4lsqd\" (UID: \"19358ccd-5ef0-4416-81db-347114a9bbe4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4lsqd" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.106352 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/12479473-09c0-4d87-9075-0b37754123a6-plugins-dir\") pod \"csi-hostpathplugin-8fhsm\" (UID: \"12479473-09c0-4d87-9075-0b37754123a6\") " pod="hostpath-provisioner/csi-hostpathplugin-8fhsm" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.106567 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/19358ccd-5ef0-4416-81db-347114a9bbe4-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-4lsqd\" (UID: \"19358ccd-5ef0-4416-81db-347114a9bbe4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4lsqd" Oct 11 04:53:41 
crc kubenswrapper[4651]: I1011 04:53:41.106982 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/5ad2068c-8217-424f-aa22-dfb57604ea05-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-2lxzd\" (UID: \"5ad2068c-8217-424f-aa22-dfb57604ea05\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-2lxzd" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.108255 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87w7k\" (UniqueName: \"kubernetes.io/projected/f8fd1293-3d68-4dd2-bc12-8f7c02017bcd-kube-api-access-87w7k\") pod \"control-plane-machine-set-operator-78cbb6b69f-27wgh\" (UID: \"f8fd1293-3d68-4dd2-bc12-8f7c02017bcd\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-27wgh" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.108661 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/19358ccd-5ef0-4416-81db-347114a9bbe4-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-4lsqd\" (UID: \"19358ccd-5ef0-4416-81db-347114a9bbe4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4lsqd" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.108791 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssbtk\" (UniqueName: \"kubernetes.io/projected/19358ccd-5ef0-4416-81db-347114a9bbe4-kube-api-access-ssbtk\") pod \"cluster-image-registry-operator-dc59b4c8b-4lsqd\" (UID: \"19358ccd-5ef0-4416-81db-347114a9bbe4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4lsqd" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.120385 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4adc8e3f-786a-4d1b-985b-3f39cf67767a-installation-pull-secrets\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.133393 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/82548e7e-b445-4506-b42c-c9c620e82267-proxy-tls\") pod \"machine-config-controller-84d6567774-zbqhj\" (UID: \"82548e7e-b445-4506-b42c-c9c620e82267\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zbqhj" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.139116 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1f6c3e28-3c8b-41d2-a314-95103ddb7ab4-bound-sa-token\") pod \"ingress-operator-5b745b69d9-xqg79\" (UID: \"1f6c3e28-3c8b-41d2-a314-95103ddb7ab4\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xqg79" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.146762 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/1f6c3e28-3c8b-41d2-a314-95103ddb7ab4-metrics-tls\") pod \"ingress-operator-5b745b69d9-xqg79\" (UID: \"1f6c3e28-3c8b-41d2-a314-95103ddb7ab4\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xqg79" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.146932 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: 
\"kubernetes.io/secret/5ad2068c-8217-424f-aa22-dfb57604ea05-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-2lxzd\" (UID: \"5ad2068c-8217-424f-aa22-dfb57604ea05\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-2lxzd" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.148030 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2hhkn"] Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.148554 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fmksb\" (UniqueName: \"kubernetes.io/projected/4adc8e3f-786a-4d1b-985b-3f39cf67767a-kube-api-access-fmksb\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.151378 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-27wgh" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.154965 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8r9t\" (UniqueName: \"kubernetes.io/projected/6cd78244-1fbc-4c7b-81ee-be4a2e1eda22-kube-api-access-t8r9t\") pod \"migrator-59844c95c7-7tk5h\" (UID: \"6cd78244-1fbc-4c7b-81ee-be4a2e1eda22\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7tk5h" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.162578 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2jmxt"] Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.193676 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-thfrv"] Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.203918 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tz2c4\" (UniqueName: \"kubernetes.io/projected/1f6c3e28-3c8b-41d2-a314-95103ddb7ab4-kube-api-access-tz2c4\") pod \"ingress-operator-5b745b69d9-xqg79\" (UID: \"1f6c3e28-3c8b-41d2-a314-95103ddb7ab4\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xqg79" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.209313 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-p8tts" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.210761 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f6aa26e9-64fd-4591-9824-bb8b4ddfce0f-serving-cert\") pod \"service-ca-operator-777779d784-2mj6l\" (UID: \"f6aa26e9-64fd-4591-9824-bb8b4ddfce0f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2mj6l" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.210800 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c88b0ab7-f277-4d78-937d-9268c3e34eae-config-volume\") pod \"dns-default-xx5gz\" (UID: \"c88b0ab7-f277-4d78-937d-9268c3e34eae\") " pod="openshift-dns/dns-default-xx5gz" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.210853 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rzct\" (UniqueName: \"kubernetes.io/projected/05f54403-9045-4e0b-94af-636e78ba5c52-kube-api-access-4rzct\") pod \"package-server-manager-789f6589d5-k85tm\" (UID: \"05f54403-9045-4e0b-94af-636e78ba5c52\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-k85tm" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.210935 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/3bc42b99-e1e0-4c88-90fd-8ba933095287-etcd-ca\") pod \"etcd-operator-b45778765-97x45\" (UID: \"3bc42b99-e1e0-4c88-90fd-8ba933095287\") " pod="openshift-etcd-operator/etcd-operator-b45778765-97x45" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.211663 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxc6z\" (UniqueName: \"kubernetes.io/projected/da275bab-31cd-4cf4-bb88-820b837416ec-kube-api-access-dxc6z\") pod \"machine-config-server-w55bl\" (UID: \"da275bab-31cd-4cf4-bb88-820b837416ec\") " pod="openshift-machine-config-operator/machine-config-server-w55bl" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.214182 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/12479473-09c0-4d87-9075-0b37754123a6-socket-dir\") pod \"csi-hostpathplugin-8fhsm\" (UID: \"12479473-09c0-4d87-9075-0b37754123a6\") " pod="hostpath-provisioner/csi-hostpathplugin-8fhsm" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.214319 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/3bc42b99-e1e0-4c88-90fd-8ba933095287-etcd-ca\") pod \"etcd-operator-b45778765-97x45\" (UID: \"3bc42b99-e1e0-4c88-90fd-8ba933095287\") " pod="openshift-etcd-operator/etcd-operator-b45778765-97x45" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.214984 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wm6m8\" (UniqueName: \"kubernetes.io/projected/5ad2068c-8217-424f-aa22-dfb57604ea05-kube-api-access-wm6m8\") pod \"multus-admission-controller-857f4d67dd-2lxzd\" (UID: \"5ad2068c-8217-424f-aa22-dfb57604ea05\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-2lxzd" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.221397 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/f6aa26e9-64fd-4591-9824-bb8b4ddfce0f-serving-cert\") pod \"service-ca-operator-777779d784-2mj6l\" (UID: \"f6aa26e9-64fd-4591-9824-bb8b4ddfce0f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2mj6l" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.228047 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7tk5h" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.228341 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xqg79" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.228469 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/12479473-09c0-4d87-9075-0b37754123a6-socket-dir\") pod \"csi-hostpathplugin-8fhsm\" (UID: \"12479473-09c0-4d87-9075-0b37754123a6\") " pod="hostpath-provisioner/csi-hostpathplugin-8fhsm" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.228563 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qftf8\" (UniqueName: \"kubernetes.io/projected/24845824-d21b-4793-8bcf-632ef188076e-kube-api-access-qftf8\") pod \"ingress-canary-9qvz2\" (UID: \"24845824-d21b-4793-8bcf-632ef188076e\") " pod="openshift-ingress-canary/ingress-canary-9qvz2" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.228910 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/da275bab-31cd-4cf4-bb88-820b837416ec-certs\") pod \"machine-config-server-w55bl\" (UID: \"da275bab-31cd-4cf4-bb88-820b837416ec\") " pod="openshift-machine-config-operator/machine-config-server-w55bl" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.228994 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rgxlz"] Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.229031 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/6cfb505f-a515-433b-82ac-792d3b435ce1-profile-collector-cert\") pod \"olm-operator-6b444d44fb-97fqd\" (UID: \"6cfb505f-a515-433b-82ac-792d3b435ce1\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-97fqd" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.229084 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1aa4c8c0-ff6e-4683-a554-0286fb970db3-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-jkbfd\" (UID: \"1aa4c8c0-ff6e-4683-a554-0286fb970db3\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jkbfd" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.229104 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/12479473-09c0-4d87-9075-0b37754123a6-plugins-dir\") pod \"csi-hostpathplugin-8fhsm\" (UID: \"12479473-09c0-4d87-9075-0b37754123a6\") " pod="hostpath-provisioner/csi-hostpathplugin-8fhsm" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.229458 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" 
(UniqueName: \"kubernetes.io/configmap/5977b2a6-9bef-4215-a7d6-602ea9be37c2-signing-cabundle\") pod \"service-ca-9c57cc56f-f7bf8\" (UID: \"5977b2a6-9bef-4215-a7d6-602ea9be37c2\") " pod="openshift-service-ca/service-ca-9c57cc56f-f7bf8" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.229493 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a25ac582-d0a6-4bd7-a9c9-dbed70086212-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-cd92z\" (UID: \"a25ac582-d0a6-4bd7-a9c9-dbed70086212\") " pod="openshift-marketplace/marketplace-operator-79b997595-cd92z" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.229884 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/24845824-d21b-4793-8bcf-632ef188076e-cert\") pod \"ingress-canary-9qvz2\" (UID: \"24845824-d21b-4793-8bcf-632ef188076e\") " pod="openshift-ingress-canary/ingress-canary-9qvz2" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.230043 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5lgkk\" (UniqueName: \"kubernetes.io/projected/c88b0ab7-f277-4d78-937d-9268c3e34eae-kube-api-access-5lgkk\") pod \"dns-default-xx5gz\" (UID: \"c88b0ab7-f277-4d78-937d-9268c3e34eae\") " pod="openshift-dns/dns-default-xx5gz" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.230140 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c67w2\" (UniqueName: \"kubernetes.io/projected/f6aa26e9-64fd-4591-9824-bb8b4ddfce0f-kube-api-access-c67w2\") pod \"service-ca-operator-777779d784-2mj6l\" (UID: \"f6aa26e9-64fd-4591-9824-bb8b4ddfce0f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2mj6l" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.230193 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/3bc42b99-e1e0-4c88-90fd-8ba933095287-etcd-service-ca\") pod \"etcd-operator-b45778765-97x45\" (UID: \"3bc42b99-e1e0-4c88-90fd-8ba933095287\") " pod="openshift-etcd-operator/etcd-operator-b45778765-97x45" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.230052 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/12479473-09c0-4d87-9075-0b37754123a6-plugins-dir\") pod \"csi-hostpathplugin-8fhsm\" (UID: \"12479473-09c0-4d87-9075-0b37754123a6\") " pod="hostpath-provisioner/csi-hostpathplugin-8fhsm" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.230219 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1aa4c8c0-ff6e-4683-a554-0286fb970db3-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-jkbfd\" (UID: \"1aa4c8c0-ff6e-4683-a554-0286fb970db3\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jkbfd" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.230841 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-2mj56"] Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.231011 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.231265 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3bc42b99-e1e0-4c88-90fd-8ba933095287-etcd-client\") pod \"etcd-operator-b45778765-97x45\" (UID: \"3bc42b99-e1e0-4c88-90fd-8ba933095287\") " pod="openshift-etcd-operator/etcd-operator-b45778765-97x45" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.231321 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/6cfb505f-a515-433b-82ac-792d3b435ce1-srv-cert\") pod \"olm-operator-6b444d44fb-97fqd\" (UID: \"6cfb505f-a515-433b-82ac-792d3b435ce1\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-97fqd" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.231345 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5b16fb63-ca55-4c40-8d92-21477dd79984-auth-proxy-config\") pod \"machine-config-operator-74547568cd-r4vsk\" (UID: \"5b16fb63-ca55-4c40-8d92-21477dd79984\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-r4vsk" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.231396 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6aa26e9-64fd-4591-9824-bb8b4ddfce0f-config\") pod \"service-ca-operator-777779d784-2mj6l\" (UID: \"f6aa26e9-64fd-4591-9824-bb8b4ddfce0f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2mj6l" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.231426 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r98nv\" (UniqueName: \"kubernetes.io/projected/b0ea38e2-3e31-4208-a918-2859626f0048-kube-api-access-r98nv\") pod \"collect-profiles-29335965-cqjjn\" (UID: \"b0ea38e2-3e31-4208-a918-2859626f0048\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335965-cqjjn" Oct 11 04:53:41 crc kubenswrapper[4651]: E1011 04:53:41.231450 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:41.731437533 +0000 UTC m=+142.627670329 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.231475 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/928ecea2-0af7-48bf-a442-ee6c6c86d00c-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-2q6fr\" (UID: \"928ecea2-0af7-48bf-a442-ee6c6c86d00c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2q6fr" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.231221 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/3bc42b99-e1e0-4c88-90fd-8ba933095287-etcd-service-ca\") pod \"etcd-operator-b45778765-97x45\" (UID: \"3bc42b99-e1e0-4c88-90fd-8ba933095287\") " pod="openshift-etcd-operator/etcd-operator-b45778765-97x45" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.231513 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92r8x\" (UniqueName: \"kubernetes.io/projected/6cfb505f-a515-433b-82ac-792d3b435ce1-kube-api-access-92r8x\") pod \"olm-operator-6b444d44fb-97fqd\" (UID: \"6cfb505f-a515-433b-82ac-792d3b435ce1\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-97fqd" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.231841 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a25ac582-d0a6-4bd7-a9c9-dbed70086212-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-cd92z\" (UID: \"a25ac582-d0a6-4bd7-a9c9-dbed70086212\") " pod="openshift-marketplace/marketplace-operator-79b997595-cd92z" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.231893 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/12479473-09c0-4d87-9075-0b37754123a6-csi-data-dir\") pod \"csi-hostpathplugin-8fhsm\" (UID: \"12479473-09c0-4d87-9075-0b37754123a6\") " pod="hostpath-provisioner/csi-hostpathplugin-8fhsm" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.231910 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/928ecea2-0af7-48bf-a442-ee6c6c86d00c-config\") pod \"kube-apiserver-operator-766d6c64bb-2q6fr\" (UID: \"928ecea2-0af7-48bf-a442-ee6c6c86d00c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2q6fr" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.231936 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3bc42b99-e1e0-4c88-90fd-8ba933095287-config\") pod \"etcd-operator-b45778765-97x45\" (UID: \"3bc42b99-e1e0-4c88-90fd-8ba933095287\") " pod="openshift-etcd-operator/etcd-operator-b45778765-97x45" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.231954 4651 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b0ea38e2-3e31-4208-a918-2859626f0048-config-volume\") pod \"collect-profiles-29335965-cqjjn\" (UID: \"b0ea38e2-3e31-4208-a918-2859626f0048\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335965-cqjjn" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.231969 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/da275bab-31cd-4cf4-bb88-820b837416ec-node-bootstrap-token\") pod \"machine-config-server-w55bl\" (UID: \"da275bab-31cd-4cf4-bb88-820b837416ec\") " pod="openshift-machine-config-operator/machine-config-server-w55bl" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.231995 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwlps\" (UniqueName: \"kubernetes.io/projected/a25ac582-d0a6-4bd7-a9c9-dbed70086212-kube-api-access-cwlps\") pod \"marketplace-operator-79b997595-cd92z\" (UID: \"a25ac582-d0a6-4bd7-a9c9-dbed70086212\") " pod="openshift-marketplace/marketplace-operator-79b997595-cd92z" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.232009 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/c88b0ab7-f277-4d78-937d-9268c3e34eae-metrics-tls\") pod \"dns-default-xx5gz\" (UID: \"c88b0ab7-f277-4d78-937d-9268c3e34eae\") " pod="openshift-dns/dns-default-xx5gz" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.232030 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v88sq\" (UniqueName: \"kubernetes.io/projected/12479473-09c0-4d87-9075-0b37754123a6-kube-api-access-v88sq\") pod \"csi-hostpathplugin-8fhsm\" (UID: \"12479473-09c0-4d87-9075-0b37754123a6\") " pod="hostpath-provisioner/csi-hostpathplugin-8fhsm" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.232059 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/5b16fb63-ca55-4c40-8d92-21477dd79984-images\") pod \"machine-config-operator-74547568cd-r4vsk\" (UID: \"5b16fb63-ca55-4c40-8d92-21477dd79984\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-r4vsk" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.232076 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3bc42b99-e1e0-4c88-90fd-8ba933095287-serving-cert\") pod \"etcd-operator-b45778765-97x45\" (UID: \"3bc42b99-e1e0-4c88-90fd-8ba933095287\") " pod="openshift-etcd-operator/etcd-operator-b45778765-97x45" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.232098 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/5977b2a6-9bef-4215-a7d6-602ea9be37c2-signing-key\") pod \"service-ca-9c57cc56f-f7bf8\" (UID: \"5977b2a6-9bef-4215-a7d6-602ea9be37c2\") " pod="openshift-service-ca/service-ca-9c57cc56f-f7bf8" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.232122 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5b16fb63-ca55-4c40-8d92-21477dd79984-proxy-tls\") pod \"machine-config-operator-74547568cd-r4vsk\" (UID: 
\"5b16fb63-ca55-4c40-8d92-21477dd79984\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-r4vsk" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.232146 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/12479473-09c0-4d87-9075-0b37754123a6-registration-dir\") pod \"csi-hostpathplugin-8fhsm\" (UID: \"12479473-09c0-4d87-9075-0b37754123a6\") " pod="hostpath-provisioner/csi-hostpathplugin-8fhsm" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.232166 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b0ea38e2-3e31-4208-a918-2859626f0048-secret-volume\") pod \"collect-profiles-29335965-cqjjn\" (UID: \"b0ea38e2-3e31-4208-a918-2859626f0048\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335965-cqjjn" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.232185 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/12479473-09c0-4d87-9075-0b37754123a6-mountpoint-dir\") pod \"csi-hostpathplugin-8fhsm\" (UID: \"12479473-09c0-4d87-9075-0b37754123a6\") " pod="hostpath-provisioner/csi-hostpathplugin-8fhsm" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.232221 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1aa4c8c0-ff6e-4683-a554-0286fb970db3-config\") pod \"kube-controller-manager-operator-78b949d7b-jkbfd\" (UID: \"1aa4c8c0-ff6e-4683-a554-0286fb970db3\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jkbfd" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.232240 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/928ecea2-0af7-48bf-a442-ee6c6c86d00c-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-2q6fr\" (UID: \"928ecea2-0af7-48bf-a442-ee6c6c86d00c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2q6fr" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.232258 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2b64\" (UniqueName: \"kubernetes.io/projected/3bc42b99-e1e0-4c88-90fd-8ba933095287-kube-api-access-j2b64\") pod \"etcd-operator-b45778765-97x45\" (UID: \"3bc42b99-e1e0-4c88-90fd-8ba933095287\") " pod="openshift-etcd-operator/etcd-operator-b45778765-97x45" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.232273 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/05f54403-9045-4e0b-94af-636e78ba5c52-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-k85tm\" (UID: \"05f54403-9045-4e0b-94af-636e78ba5c52\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-k85tm" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.232292 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rtt78\" (UniqueName: \"kubernetes.io/projected/5b16fb63-ca55-4c40-8d92-21477dd79984-kube-api-access-rtt78\") pod \"machine-config-operator-74547568cd-r4vsk\" (UID: \"5b16fb63-ca55-4c40-8d92-21477dd79984\") " 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-r4vsk" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.232315 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9r78\" (UniqueName: \"kubernetes.io/projected/5977b2a6-9bef-4215-a7d6-602ea9be37c2-kube-api-access-h9r78\") pod \"service-ca-9c57cc56f-f7bf8\" (UID: \"5977b2a6-9bef-4215-a7d6-602ea9be37c2\") " pod="openshift-service-ca/service-ca-9c57cc56f-f7bf8" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.232441 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/12479473-09c0-4d87-9075-0b37754123a6-csi-data-dir\") pod \"csi-hostpathplugin-8fhsm\" (UID: \"12479473-09c0-4d87-9075-0b37754123a6\") " pod="hostpath-provisioner/csi-hostpathplugin-8fhsm" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.232652 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6aa26e9-64fd-4591-9824-bb8b4ddfce0f-config\") pod \"service-ca-operator-777779d784-2mj6l\" (UID: \"f6aa26e9-64fd-4591-9824-bb8b4ddfce0f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2mj6l" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.232908 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3bc42b99-e1e0-4c88-90fd-8ba933095287-config\") pod \"etcd-operator-b45778765-97x45\" (UID: \"3bc42b99-e1e0-4c88-90fd-8ba933095287\") " pod="openshift-etcd-operator/etcd-operator-b45778765-97x45" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.232959 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/12479473-09c0-4d87-9075-0b37754123a6-registration-dir\") pod \"csi-hostpathplugin-8fhsm\" (UID: \"12479473-09c0-4d87-9075-0b37754123a6\") " pod="hostpath-provisioner/csi-hostpathplugin-8fhsm" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.232998 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/12479473-09c0-4d87-9075-0b37754123a6-mountpoint-dir\") pod \"csi-hostpathplugin-8fhsm\" (UID: \"12479473-09c0-4d87-9075-0b37754123a6\") " pod="hostpath-provisioner/csi-hostpathplugin-8fhsm" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.233072 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5b16fb63-ca55-4c40-8d92-21477dd79984-auth-proxy-config\") pod \"machine-config-operator-74547568cd-r4vsk\" (UID: \"5b16fb63-ca55-4c40-8d92-21477dd79984\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-r4vsk" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.233757 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/5b16fb63-ca55-4c40-8d92-21477dd79984-images\") pod \"machine-config-operator-74547568cd-r4vsk\" (UID: \"5b16fb63-ca55-4c40-8d92-21477dd79984\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-r4vsk" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.233867 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jldzf\" (UniqueName: \"kubernetes.io/projected/fc942eea-0c2a-474f-bc24-c6d5fb171c79-kube-api-access-jldzf\") 
pod \"openshift-config-operator-7777fb866f-h9rlq\" (UID: \"fc942eea-0c2a-474f-bc24-c6d5fb171c79\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-h9rlq" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.239314 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3bc42b99-e1e0-4c88-90fd-8ba933095287-serving-cert\") pod \"etcd-operator-b45778765-97x45\" (UID: \"3bc42b99-e1e0-4c88-90fd-8ba933095287\") " pod="openshift-etcd-operator/etcd-operator-b45778765-97x45" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.239521 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/6cfb505f-a515-433b-82ac-792d3b435ce1-srv-cert\") pod \"olm-operator-6b444d44fb-97fqd\" (UID: \"6cfb505f-a515-433b-82ac-792d3b435ce1\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-97fqd" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.240800 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/6cfb505f-a515-433b-82ac-792d3b435ce1-profile-collector-cert\") pod \"olm-operator-6b444d44fb-97fqd\" (UID: \"6cfb505f-a515-433b-82ac-792d3b435ce1\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-97fqd" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.245151 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3bc42b99-e1e0-4c88-90fd-8ba933095287-etcd-client\") pod \"etcd-operator-b45778765-97x45\" (UID: \"3bc42b99-e1e0-4c88-90fd-8ba933095287\") " pod="openshift-etcd-operator/etcd-operator-b45778765-97x45" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.245192 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4adc8e3f-786a-4d1b-985b-3f39cf67767a-bound-sa-token\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.247353 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5b16fb63-ca55-4c40-8d92-21477dd79984-proxy-tls\") pod \"machine-config-operator-74547568cd-r4vsk\" (UID: \"5b16fb63-ca55-4c40-8d92-21477dd79984\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-r4vsk" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.258921 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44nls\" (UniqueName: \"kubernetes.io/projected/82548e7e-b445-4506-b42c-c9c620e82267-kube-api-access-44nls\") pod \"machine-config-controller-84d6567774-zbqhj\" (UID: \"82548e7e-b445-4506-b42c-c9c620e82267\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zbqhj" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.276249 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/19358ccd-5ef0-4416-81db-347114a9bbe4-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-4lsqd\" (UID: \"19358ccd-5ef0-4416-81db-347114a9bbe4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4lsqd" Oct 11 04:53:41 crc kubenswrapper[4651]: W1011 04:53:41.284534 4651 
manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd32c16ff_cc90_4759_a695_405d76694b39.slice/crio-ba73190a205922eae89a31ec7a8eb3cd1bea61706b3d38dfb203d84b25188d18 WatchSource:0}: Error finding container ba73190a205922eae89a31ec7a8eb3cd1bea61706b3d38dfb203d84b25188d18: Status 404 returned error can't find the container with id ba73190a205922eae89a31ec7a8eb3cd1bea61706b3d38dfb203d84b25188d18 Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.313601 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c67w2\" (UniqueName: \"kubernetes.io/projected/f6aa26e9-64fd-4591-9824-bb8b4ddfce0f-kube-api-access-c67w2\") pod \"service-ca-operator-777779d784-2mj6l\" (UID: \"f6aa26e9-64fd-4591-9824-bb8b4ddfce0f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-2mj6l" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.333186 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.333417 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/5977b2a6-9bef-4215-a7d6-602ea9be37c2-signing-cabundle\") pod \"service-ca-9c57cc56f-f7bf8\" (UID: \"5977b2a6-9bef-4215-a7d6-602ea9be37c2\") " pod="openshift-service-ca/service-ca-9c57cc56f-f7bf8" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.333463 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a25ac582-d0a6-4bd7-a9c9-dbed70086212-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-cd92z\" (UID: \"a25ac582-d0a6-4bd7-a9c9-dbed70086212\") " pod="openshift-marketplace/marketplace-operator-79b997595-cd92z" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.333486 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/24845824-d21b-4793-8bcf-632ef188076e-cert\") pod \"ingress-canary-9qvz2\" (UID: \"24845824-d21b-4793-8bcf-632ef188076e\") " pod="openshift-ingress-canary/ingress-canary-9qvz2" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.333503 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5lgkk\" (UniqueName: \"kubernetes.io/projected/c88b0ab7-f277-4d78-937d-9268c3e34eae-kube-api-access-5lgkk\") pod \"dns-default-xx5gz\" (UID: \"c88b0ab7-f277-4d78-937d-9268c3e34eae\") " pod="openshift-dns/dns-default-xx5gz" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.333548 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1aa4c8c0-ff6e-4683-a554-0286fb970db3-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-jkbfd\" (UID: \"1aa4c8c0-ff6e-4683-a554-0286fb970db3\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jkbfd" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.333585 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r98nv\" (UniqueName: 
\"kubernetes.io/projected/b0ea38e2-3e31-4208-a918-2859626f0048-kube-api-access-r98nv\") pod \"collect-profiles-29335965-cqjjn\" (UID: \"b0ea38e2-3e31-4208-a918-2859626f0048\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335965-cqjjn" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.333619 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/928ecea2-0af7-48bf-a442-ee6c6c86d00c-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-2q6fr\" (UID: \"928ecea2-0af7-48bf-a442-ee6c6c86d00c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2q6fr" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.333654 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a25ac582-d0a6-4bd7-a9c9-dbed70086212-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-cd92z\" (UID: \"a25ac582-d0a6-4bd7-a9c9-dbed70086212\") " pod="openshift-marketplace/marketplace-operator-79b997595-cd92z" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.333698 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/928ecea2-0af7-48bf-a442-ee6c6c86d00c-config\") pod \"kube-apiserver-operator-766d6c64bb-2q6fr\" (UID: \"928ecea2-0af7-48bf-a442-ee6c6c86d00c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2q6fr" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.333718 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b0ea38e2-3e31-4208-a918-2859626f0048-config-volume\") pod \"collect-profiles-29335965-cqjjn\" (UID: \"b0ea38e2-3e31-4208-a918-2859626f0048\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335965-cqjjn" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.333734 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/da275bab-31cd-4cf4-bb88-820b837416ec-node-bootstrap-token\") pod \"machine-config-server-w55bl\" (UID: \"da275bab-31cd-4cf4-bb88-820b837416ec\") " pod="openshift-machine-config-operator/machine-config-server-w55bl" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.333769 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwlps\" (UniqueName: \"kubernetes.io/projected/a25ac582-d0a6-4bd7-a9c9-dbed70086212-kube-api-access-cwlps\") pod \"marketplace-operator-79b997595-cd92z\" (UID: \"a25ac582-d0a6-4bd7-a9c9-dbed70086212\") " pod="openshift-marketplace/marketplace-operator-79b997595-cd92z" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.333786 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/c88b0ab7-f277-4d78-937d-9268c3e34eae-metrics-tls\") pod \"dns-default-xx5gz\" (UID: \"c88b0ab7-f277-4d78-937d-9268c3e34eae\") " pod="openshift-dns/dns-default-xx5gz" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.333804 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/5977b2a6-9bef-4215-a7d6-602ea9be37c2-signing-key\") pod \"service-ca-9c57cc56f-f7bf8\" (UID: \"5977b2a6-9bef-4215-a7d6-602ea9be37c2\") " 
pod="openshift-service-ca/service-ca-9c57cc56f-f7bf8" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.333936 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b0ea38e2-3e31-4208-a918-2859626f0048-secret-volume\") pod \"collect-profiles-29335965-cqjjn\" (UID: \"b0ea38e2-3e31-4208-a918-2859626f0048\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335965-cqjjn" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.333959 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1aa4c8c0-ff6e-4683-a554-0286fb970db3-config\") pod \"kube-controller-manager-operator-78b949d7b-jkbfd\" (UID: \"1aa4c8c0-ff6e-4683-a554-0286fb970db3\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jkbfd" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.333977 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/928ecea2-0af7-48bf-a442-ee6c6c86d00c-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-2q6fr\" (UID: \"928ecea2-0af7-48bf-a442-ee6c6c86d00c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2q6fr" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.334021 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/05f54403-9045-4e0b-94af-636e78ba5c52-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-k85tm\" (UID: \"05f54403-9045-4e0b-94af-636e78ba5c52\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-k85tm" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.334046 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9r78\" (UniqueName: \"kubernetes.io/projected/5977b2a6-9bef-4215-a7d6-602ea9be37c2-kube-api-access-h9r78\") pod \"service-ca-9c57cc56f-f7bf8\" (UID: \"5977b2a6-9bef-4215-a7d6-602ea9be37c2\") " pod="openshift-service-ca/service-ca-9c57cc56f-f7bf8" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.334094 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c88b0ab7-f277-4d78-937d-9268c3e34eae-config-volume\") pod \"dns-default-xx5gz\" (UID: \"c88b0ab7-f277-4d78-937d-9268c3e34eae\") " pod="openshift-dns/dns-default-xx5gz" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.334112 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rzct\" (UniqueName: \"kubernetes.io/projected/05f54403-9045-4e0b-94af-636e78ba5c52-kube-api-access-4rzct\") pod \"package-server-manager-789f6589d5-k85tm\" (UID: \"05f54403-9045-4e0b-94af-636e78ba5c52\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-k85tm" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.334144 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxc6z\" (UniqueName: \"kubernetes.io/projected/da275bab-31cd-4cf4-bb88-820b837416ec-kube-api-access-dxc6z\") pod \"machine-config-server-w55bl\" (UID: \"da275bab-31cd-4cf4-bb88-820b837416ec\") " pod="openshift-machine-config-operator/machine-config-server-w55bl" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.334186 4651 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qftf8\" (UniqueName: \"kubernetes.io/projected/24845824-d21b-4793-8bcf-632ef188076e-kube-api-access-qftf8\") pod \"ingress-canary-9qvz2\" (UID: \"24845824-d21b-4793-8bcf-632ef188076e\") " pod="openshift-ingress-canary/ingress-canary-9qvz2" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.334201 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/da275bab-31cd-4cf4-bb88-820b837416ec-certs\") pod \"machine-config-server-w55bl\" (UID: \"da275bab-31cd-4cf4-bb88-820b837416ec\") " pod="openshift-machine-config-operator/machine-config-server-w55bl" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.334217 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1aa4c8c0-ff6e-4683-a554-0286fb970db3-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-jkbfd\" (UID: \"1aa4c8c0-ff6e-4683-a554-0286fb970db3\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jkbfd" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.334578 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92r8x\" (UniqueName: \"kubernetes.io/projected/6cfb505f-a515-433b-82ac-792d3b435ce1-kube-api-access-92r8x\") pod \"olm-operator-6b444d44fb-97fqd\" (UID: \"6cfb505f-a515-433b-82ac-792d3b435ce1\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-97fqd" Oct 11 04:53:41 crc kubenswrapper[4651]: E1011 04:53:41.335990 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:41.835965378 +0000 UTC m=+142.732198175 (durationBeforeRetry 500ms). 
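
The E1011 nestedpendingoperations entries above show the volume manager's retry gate: when an operation on a volume key fails, that key is parked and no retry is permitted until the recorded backoff window (500ms here) has elapsed. A minimal Go sketch of that bookkeeping, assuming the fixed delay printed in the log; the type and method names are illustrative, not the kubelet's actual API:

    package main

    // Sketch of the gate behind "No retries permitted until ..." above.
    // Illustrative names; not the kubelet's real nestedpendingoperations API.

    import (
    	"fmt"
    	"time"
    )

    type pendingOp struct {
    	lastErrorTime       time.Time
    	durationBeforeRetry time.Duration // 500ms in the log above
    }

    // retryAllowed mirrors the check that produced the E1011 lines: the
    // operation may not start again before lastError + backoff window.
    func (op *pendingOp) retryAllowed(now time.Time) error {
    	next := op.lastErrorTime.Add(op.durationBeforeRetry)
    	if now.Before(next) {
    		return fmt.Errorf("no retries permitted until %s (durationBeforeRetry %s)",
    			next.UTC(), op.durationBeforeRetry)
    	}
    	return nil
    }

    func main() {
    	op := &pendingOp{lastErrorTime: time.Now(), durationBeforeRetry: 500 * time.Millisecond}
    	if err := op.retryAllowed(time.Now()); err != nil {
    		fmt.Println("Error:", err) // retried too soon
    	}
    	time.Sleep(600 * time.Millisecond)
    	fmt.Println("after backoff:", op.retryAllowed(time.Now())) // <nil>
    }

In the real kubelet the window grows on repeated failures (exponential backoff, capped at roughly two minutes), which is why a persistently failing volume operation spaces out over time.
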
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.336909 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c88b0ab7-f277-4d78-937d-9268c3e34eae-config-volume\") pod \"dns-default-xx5gz\" (UID: \"c88b0ab7-f277-4d78-937d-9268c3e34eae\") " pod="openshift-dns/dns-default-xx5gz" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.337768 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b0ea38e2-3e31-4208-a918-2859626f0048-config-volume\") pod \"collect-profiles-29335965-cqjjn\" (UID: \"b0ea38e2-3e31-4208-a918-2859626f0048\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335965-cqjjn" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.337783 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a25ac582-d0a6-4bd7-a9c9-dbed70086212-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-cd92z\" (UID: \"a25ac582-d0a6-4bd7-a9c9-dbed70086212\") " pod="openshift-marketplace/marketplace-operator-79b997595-cd92z" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.338613 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/5977b2a6-9bef-4215-a7d6-602ea9be37c2-signing-cabundle\") pod \"service-ca-9c57cc56f-f7bf8\" (UID: \"5977b2a6-9bef-4215-a7d6-602ea9be37c2\") " pod="openshift-service-ca/service-ca-9c57cc56f-f7bf8" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.344005 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1aa4c8c0-ff6e-4683-a554-0286fb970db3-config\") pod \"kube-controller-manager-operator-78b949d7b-jkbfd\" (UID: \"1aa4c8c0-ff6e-4683-a554-0286fb970db3\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jkbfd" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.346588 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-s9qnd"] Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.346584 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/928ecea2-0af7-48bf-a442-ee6c6c86d00c-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-2q6fr\" (UID: \"928ecea2-0af7-48bf-a442-ee6c6c86d00c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2q6fr" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.346717 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/da275bab-31cd-4cf4-bb88-820b837416ec-node-bootstrap-token\") pod \"machine-config-server-w55bl\" (UID: \"da275bab-31cd-4cf4-bb88-820b837416ec\") " pod="openshift-machine-config-operator/machine-config-server-w55bl" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.349419 4651 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/c88b0ab7-f277-4d78-937d-9268c3e34eae-metrics-tls\") pod \"dns-default-xx5gz\" (UID: \"c88b0ab7-f277-4d78-937d-9268c3e34eae\") " pod="openshift-dns/dns-default-xx5gz" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.350125 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/05f54403-9045-4e0b-94af-636e78ba5c52-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-k85tm\" (UID: \"05f54403-9045-4e0b-94af-636e78ba5c52\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-k85tm" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.352709 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/24845824-d21b-4793-8bcf-632ef188076e-cert\") pod \"ingress-canary-9qvz2\" (UID: \"24845824-d21b-4793-8bcf-632ef188076e\") " pod="openshift-ingress-canary/ingress-canary-9qvz2" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.353421 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/da275bab-31cd-4cf4-bb88-820b837416ec-certs\") pod \"machine-config-server-w55bl\" (UID: \"da275bab-31cd-4cf4-bb88-820b837416ec\") " pod="openshift-machine-config-operator/machine-config-server-w55bl" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.354591 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b0ea38e2-3e31-4208-a918-2859626f0048-secret-volume\") pod \"collect-profiles-29335965-cqjjn\" (UID: \"b0ea38e2-3e31-4208-a918-2859626f0048\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335965-cqjjn" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.356509 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/5977b2a6-9bef-4215-a7d6-602ea9be37c2-signing-key\") pod \"service-ca-9c57cc56f-f7bf8\" (UID: \"5977b2a6-9bef-4215-a7d6-602ea9be37c2\") " pod="openshift-service-ca/service-ca-9c57cc56f-f7bf8" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.357885 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2b64\" (UniqueName: \"kubernetes.io/projected/3bc42b99-e1e0-4c88-90fd-8ba933095287-kube-api-access-j2b64\") pod \"etcd-operator-b45778765-97x45\" (UID: \"3bc42b99-e1e0-4c88-90fd-8ba933095287\") " pod="openshift-etcd-operator/etcd-operator-b45778765-97x45" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.361228 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1aa4c8c0-ff6e-4683-a554-0286fb970db3-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-jkbfd\" (UID: \"1aa4c8c0-ff6e-4683-a554-0286fb970db3\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jkbfd" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.370547 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-h9rlq" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.376571 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rtt78\" (UniqueName: \"kubernetes.io/projected/5b16fb63-ca55-4c40-8d92-21477dd79984-kube-api-access-rtt78\") pod \"machine-config-operator-74547568cd-r4vsk\" (UID: \"5b16fb63-ca55-4c40-8d92-21477dd79984\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-r4vsk" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.379925 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4lsqd" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.391902 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v88sq\" (UniqueName: \"kubernetes.io/projected/12479473-09c0-4d87-9075-0b37754123a6-kube-api-access-v88sq\") pod \"csi-hostpathplugin-8fhsm\" (UID: \"12479473-09c0-4d87-9075-0b37754123a6\") " pod="hostpath-provisioner/csi-hostpathplugin-8fhsm" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.396298 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-n4hfz"] Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.408101 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/928ecea2-0af7-48bf-a442-ee6c6c86d00c-config\") pod \"kube-apiserver-operator-766d6c64bb-2q6fr\" (UID: \"928ecea2-0af7-48bf-a442-ee6c6c86d00c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2q6fr" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.409030 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a25ac582-d0a6-4bd7-a9c9-dbed70086212-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-cd92z\" (UID: \"a25ac582-d0a6-4bd7-a9c9-dbed70086212\") " pod="openshift-marketplace/marketplace-operator-79b997595-cd92z" Oct 11 04:53:41 crc kubenswrapper[4651]: W1011 04:53:41.430553 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod49755973_5d14_4c72_9858_7edca1f2c2ee.slice/crio-65809da5fb71c7abaf4c9557f4966f5c9f52156146608327b87019c93a9d7968 WatchSource:0}: Error finding container 65809da5fb71c7abaf4c9557f4966f5c9f52156146608327b87019c93a9d7968: Status 404 returned error can't find the container with id 65809da5fb71c7abaf4c9557f4966f5c9f52156146608327b87019c93a9d7968 Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.438545 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:41 crc kubenswrapper[4651]: E1011 04:53:41.439265 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:41.939249021 +0000 UTC m=+142.835481827 (durationBeforeRetry 500ms). 
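
Both failure modes repeating above and below, attacher.MountDevice for the new image-registry pod and Unmounter.TearDownAt for the old one, stem from the same lookup: building a CSI client requires the driver name to be present in the kubelet's registry of plugins that have announced themselves over the plugin-registration socket, and kubevirt.io.hostpath-provisioner has not registered yet (its csi-hostpathplugin-8fhsm pod is only now being started, per the "No sandbox for pod can be found" line above). A sketch of that lookup under those assumptions; the registry type, field names, and socket path below are illustrative:

    package main

    // Sketch of the lookup that fails repeatedly in this log: until the
    // hostpath-provisioner registers itself, every Mount/Unmount attempt
    // fails with "driver name ... not found in the list of registered CSI
    // drivers". Illustrative types, not kubelet's csiDrivers store.

    import (
    	"fmt"
    	"sync"
    )

    type driverRegistry struct {
    	mu      sync.RWMutex
    	drivers map[string]string // driver name -> endpoint (socket path)
    }

    func (r *driverRegistry) newClient(driverName string) (string, error) {
    	r.mu.RLock()
    	defer r.mu.RUnlock()
    	endpoint, ok := r.drivers[driverName]
    	if !ok {
    		return "", fmt.Errorf("driver name %s not found in the list of registered CSI drivers", driverName)
    	}
    	return endpoint, nil
    }

    func main() {
    	reg := &driverRegistry{drivers: map[string]string{}}
    	if _, err := reg.newClient("kubevirt.io.hostpath-provisioner"); err != nil {
    		fmt.Println(err) // the error seen throughout this log
    	}
    	// Once the driver pod comes up and registers, the same lookup succeeds.
    	reg.mu.Lock()
    	reg.drivers["kubevirt.io.hostpath-provisioner"] = "/var/lib/kubelet/plugins/csi-hostpath/csi.sock" // hypothetical path
    	reg.mu.Unlock()
    	fmt.Println(reg.newClient("kubevirt.io.hostpath-provisioner"))
    }

This is why the retries resolve on their own once the plugin pod is running: nothing is wrong with the volume, the driver simply is not registered yet.
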
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.441888 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/928ecea2-0af7-48bf-a442-ee6c6c86d00c-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-2q6fr\" (UID: \"928ecea2-0af7-48bf-a442-ee6c6c86d00c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2q6fr" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.444713 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-2lxzd" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.460356 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-mdhlw"] Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.460594 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zbqhj" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.470475 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxc6z\" (UniqueName: \"kubernetes.io/projected/da275bab-31cd-4cf4-bb88-820b837416ec-kube-api-access-dxc6z\") pod \"machine-config-server-w55bl\" (UID: \"da275bab-31cd-4cf4-bb88-820b837416ec\") " pod="openshift-machine-config-operator/machine-config-server-w55bl" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.470577 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h9r78\" (UniqueName: \"kubernetes.io/projected/5977b2a6-9bef-4215-a7d6-602ea9be37c2-kube-api-access-h9r78\") pod \"service-ca-9c57cc56f-f7bf8\" (UID: \"5977b2a6-9bef-4215-a7d6-602ea9be37c2\") " pod="openshift-service-ca/service-ca-9c57cc56f-f7bf8" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.497341 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rzct\" (UniqueName: \"kubernetes.io/projected/05f54403-9045-4e0b-94af-636e78ba5c52-kube-api-access-4rzct\") pod \"package-server-manager-789f6589d5-k85tm\" (UID: \"05f54403-9045-4e0b-94af-636e78ba5c52\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-k85tm" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.525462 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1aa4c8c0-ff6e-4683-a554-0286fb970db3-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-jkbfd\" (UID: \"1aa4c8c0-ff6e-4683-a554-0286fb970db3\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jkbfd" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.539890 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:53:41 crc kubenswrapper[4651]: E1011 04:53:41.540065 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:42.040041929 +0000 UTC m=+142.936274725 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.540161 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:41 crc kubenswrapper[4651]: E1011 04:53:41.540463 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:42.04045604 +0000 UTC m=+142.936688836 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.547585 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5lgkk\" (UniqueName: \"kubernetes.io/projected/c88b0ab7-f277-4d78-937d-9268c3e34eae-kube-api-access-5lgkk\") pod \"dns-default-xx5gz\" (UID: \"c88b0ab7-f277-4d78-937d-9268c3e34eae\") " pod="openshift-dns/dns-default-xx5gz" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.552651 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cwlps\" (UniqueName: \"kubernetes.io/projected/a25ac582-d0a6-4bd7-a9c9-dbed70086212-kube-api-access-cwlps\") pod \"marketplace-operator-79b997595-cd92z\" (UID: \"a25ac582-d0a6-4bd7-a9c9-dbed70086212\") " pod="openshift-marketplace/marketplace-operator-79b997595-cd92z" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.558557 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-cd92z" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.566970 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-k85tm" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.576379 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qftf8\" (UniqueName: \"kubernetes.io/projected/24845824-d21b-4793-8bcf-632ef188076e-kube-api-access-qftf8\") pod \"ingress-canary-9qvz2\" (UID: \"24845824-d21b-4793-8bcf-632ef188076e\") " pod="openshift-ingress-canary/ingress-canary-9qvz2" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.581716 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-r4vsk" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.594603 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-2mj6l" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.597661 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r98nv\" (UniqueName: \"kubernetes.io/projected/b0ea38e2-3e31-4208-a918-2859626f0048-kube-api-access-r98nv\") pod \"collect-profiles-29335965-cqjjn\" (UID: \"b0ea38e2-3e31-4208-a918-2859626f0048\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335965-cqjjn" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.598353 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335965-cqjjn" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.606250 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-97x45" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.608326 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9jbmn"] Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.608482 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zqjmp"] Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.614423 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-97fqd" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.618973 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6qdd5"] Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.626658 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jkbfd" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.627245 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-f7bf8" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.637003 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2q6fr" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.641162 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-9qvz2" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.641388 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:53:41 crc kubenswrapper[4651]: E1011 04:53:41.641528 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:42.141495714 +0000 UTC m=+143.037728510 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.641654 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:41 crc kubenswrapper[4651]: E1011 04:53:41.642134 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:42.14211916 +0000 UTC m=+143.038351956 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.646867 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-xx5gz" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.652125 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ws6wr" event={"ID":"7bafdf97-6219-440a-a3b0-49c55c2a3b5b","Type":"ContainerStarted","Data":"5fe249d59090dcb7efaabcd85d5ab362937499eb4129fa8960edd5e0179d0bef"} Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.653312 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-w55bl" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.654307 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2jmxt" event={"ID":"67e0d9bc-63c7-4509-b804-d63705caa189","Type":"ContainerStarted","Data":"cdb7e749e4086c0b83dbf6465f728354891ebdd80f00cb87d451c4639854dd13"} Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.655385 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-p8tts"] Oct 11 04:53:41 crc kubenswrapper[4651]: W1011 04:53:41.658218 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod56d26781_8236_4a07_9dbf_d0b926cba29a.slice/crio-5c889574b20e879e424296af46860bdbaa9c63266783b3f7e3991765a44e93d3 WatchSource:0}: Error finding container 5c889574b20e879e424296af46860bdbaa9c63266783b3f7e3991765a44e93d3: Status 404 returned error can't find the container with id 5c889574b20e879e424296af46860bdbaa9c63266783b3f7e3991765a44e93d3 Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.661375 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-pxhmv" event={"ID":"e011176f-c96e-4823-89f6-648d574d1ef4","Type":"ContainerStarted","Data":"ff625e9d15650151fc30eb4d9dbb7a5c035f2545285fa77e227a6dcfbd063f45"} Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.661416 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-pxhmv" event={"ID":"e011176f-c96e-4823-89f6-648d574d1ef4","Type":"ContainerStarted","Data":"5ea977a1d9bbe1b13db202ca8c06da31cad358e09d29ff6676359a6024467673"} Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.664165 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-mdhlw" event={"ID":"aa722226-d2d2-4122-93fa-1aeee25b7868","Type":"ContainerStarted","Data":"adaa5c63f2a67b85868c927da3c9444b4b9e0131b3ff232a7ed1100e21de3d58"} Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.667018 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-s9qnd" event={"ID":"5c2e6635-02f1-4869-9d20-7577116611ba","Type":"ContainerStarted","Data":"1acdb04002cda059cae0d3b2ba97fae3881ba0db9b1b33f454109a76551e7fb6"} Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.667703 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rgxlz" event={"ID":"391e51dc-8070-40bf-ac61-33c1ef37c72b","Type":"ContainerStarted","Data":"fe82cf64a6e695b1077075d248bfa1637e80e41cf2d5a342fdd6bfc381d63a66"} Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.669205 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2f67z" event={"ID":"5fb1ac4f-b0d1-4314-ac06-d887654fa3f5","Type":"ContainerStarted","Data":"b986dbad3c28a294fc0cb421a0e30153440467cd8c885c871f87028413fa4ec3"} Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.669230 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2f67z" event={"ID":"5fb1ac4f-b0d1-4314-ac06-d887654fa3f5","Type":"ContainerStarted","Data":"b0592bd8150ea461d001c05935560369bf40b643807f2022b11e0ee5accbacc4"} Oct 11 04:53:41 crc 
kubenswrapper[4651]: I1011 04:53:41.670447 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-2hhkn" event={"ID":"2797c45d-e1d7-44d7-b936-44048593f540","Type":"ContainerStarted","Data":"f0eb6fa8640c101b0068de5b4f8a8204955d3bbda560a4f1b048280bffe2774b"} Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.671451 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" event={"ID":"49755973-5d14-4c72-9858-7edca1f2c2ee","Type":"ContainerStarted","Data":"65809da5fb71c7abaf4c9557f4966f5c9f52156146608327b87019c93a9d7968"} Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.672109 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-thfrv" event={"ID":"d32c16ff-cc90-4759-a695-405d76694b39","Type":"ContainerStarted","Data":"ba73190a205922eae89a31ec7a8eb3cd1bea61706b3d38dfb203d84b25188d18"} Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.672843 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-2mj56" event={"ID":"b590e4d5-1684-4e2f-b5e9-8fbf00db4546","Type":"ContainerStarted","Data":"ef3011716bd5c8741d595951e4b27158b3f0bc69d056c3a5f2ecfc693dafe87b"} Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.673995 4651 generic.go:334] "Generic (PLEG): container finished" podID="f90412cd-2f6a-4322-b3b7-29904de3b09c" containerID="a407d954bea7304380b40ea5274b48288996e3c5c54fd7b3b0dbbcb0b53e70f4" exitCode=0 Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.674232 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-8fhsm" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.675054 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs" event={"ID":"f90412cd-2f6a-4322-b3b7-29904de3b09c","Type":"ContainerDied","Data":"a407d954bea7304380b40ea5274b48288996e3c5c54fd7b3b0dbbcb0b53e70f4"} Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.675093 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs" event={"ID":"f90412cd-2f6a-4322-b3b7-29904de3b09c","Type":"ContainerStarted","Data":"cd5ee42de76d6becbb1a4050e0d72a63bce1008059466900e2abed85888724cc"} Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.681990 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pkhrx" event={"ID":"27505683-e595-4855-8a29-aceee78542b6","Type":"ContainerStarted","Data":"48ef3c3c050ae4f5d07a478caffe75b28d9ca82f6819742e41ae4b9210782c2e"} Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.682023 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pkhrx" event={"ID":"27505683-e595-4855-8a29-aceee78542b6","Type":"ContainerStarted","Data":"ad8f515568c1c63c51315eb3691dcc49eb9d576df74d6d7498aa8f5710f827e1"} Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.682353 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pkhrx" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.716675 4651 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-pkhrx 
container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.716724 4651 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pkhrx" podUID="27505683-e595-4855-8a29-aceee78542b6" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.723492 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hf4jk"] Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.725483 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-xqg79"] Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.742938 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:53:41 crc kubenswrapper[4651]: E1011 04:53:41.753741 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:42.253678089 +0000 UTC m=+143.149910885 (durationBeforeRetry 500ms). 
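
The probe failures above are the kubelet's HTTP prober at work: it issues a GET against the pod IP and records "connection refused" as a failed probe rather than an error, since a refused connection just means the server socket is not listening yet. A rough sketch of such a check, assuming a 1s timeout; probeReadiness is an illustrative helper, and kubelet HTTPS probes do not verify the pod's serving certificate:

    package main

    // Toy readiness probe matching the "Probe failed" lines above.
    // Endpoint taken from the log; helper name and timeout are assumptions.

    import (
    	"crypto/tls"
    	"fmt"
    	"net/http"
    	"time"
    )

    func probeReadiness(url string) (string, error) {
    	client := &http.Client{
    		Timeout: 1 * time.Second,
    		// Probes skip verification of the pod's self-signed serving cert.
    		Transport: &http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: true}},
    	}
    	resp, err := client.Get(url)
    	if err != nil {
    		return "failure", fmt.Errorf("Get %q: %w", url, err) // e.g. connect: connection refused
    	}
    	defer resp.Body.Close()
    	if resp.StatusCode >= 200 && resp.StatusCode < 400 {
    		return "success", nil
    	}
    	return "failure", fmt.Errorf("status %d", resp.StatusCode)
    }

    func main() {
    	result, err := probeReadiness("https://10.217.0.5:8443/healthz")
    	fmt.Println("probeResult:", result, "err:", err)
    }
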
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.781304 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-htjqx"] Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.784638 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-27wgh"] Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.791323 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4lsqd"] Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.796093 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-pc67v"] Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.821586 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-h9rlq"] Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.844911 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:41 crc kubenswrapper[4651]: E1011 04:53:41.845663 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:42.345646926 +0000 UTC m=+143.241879722 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.908944 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-7tk5h"] Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.930043 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-k85tm"] Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.933449 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-pxhmv" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.937425 4651 patch_prober.go:28] interesting pod/router-default-5444994796-pxhmv container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.937468 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pxhmv" podUID="e011176f-c96e-4823-89f6-648d574d1ef4" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.946098 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:53:41 crc kubenswrapper[4651]: E1011 04:53:41.946270 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:42.446253689 +0000 UTC m=+143.342486485 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.946424 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:41 crc kubenswrapper[4651]: E1011 04:53:41.946667 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:42.446659879 +0000 UTC m=+143.342892675 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:41 crc kubenswrapper[4651]: W1011 04:53:41.961419 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6cd78244_1fbc_4c7b_81ee_be4a2e1eda22.slice/crio-dfc9756285d6c0af2755aa6f0c2886eedfb31a1f7ff91be99d21e8b1a9136f03 WatchSource:0}: Error finding container dfc9756285d6c0af2755aa6f0c2886eedfb31a1f7ff91be99d21e8b1a9136f03: Status 404 returned error can't find the container with id dfc9756285d6c0af2755aa6f0c2886eedfb31a1f7ff91be99d21e8b1a9136f03 Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.969165 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-pxhmv" podStartSLOduration=122.969150878 podStartE2EDuration="2m2.969150878s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:41.968429109 +0000 UTC m=+142.864661905" watchObservedRunningTime="2025-10-11 04:53:41.969150878 +0000 UTC m=+142.865383674" Oct 11 04:53:41 crc kubenswrapper[4651]: W1011 04:53:41.971243 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod05f54403_9045_4e0b_94af_636e78ba5c52.slice/crio-1161d22c5c08f20ca3069e710d972c396d6d165388e8d081a9465e797884521d WatchSource:0}: Error finding container 1161d22c5c08f20ca3069e710d972c396d6d165388e8d081a9465e797884521d: Status 404 returned error can't find the container with id 1161d22c5c08f20ca3069e710d972c396d6d165388e8d081a9465e797884521d Oct 11 04:53:41 crc kubenswrapper[4651]: I1011 04:53:41.992097 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-marketplace/marketplace-operator-79b997595-cd92z"] Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.046801 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-2lxzd"] Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.046836 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:53:42 crc kubenswrapper[4651]: E1011 04:53:42.046902 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:42.546888952 +0000 UTC m=+143.443121748 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.047506 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:42 crc kubenswrapper[4651]: E1011 04:53:42.047892 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:42.547870218 +0000 UTC m=+143.444103014 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.148303 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:53:42 crc kubenswrapper[4651]: E1011 04:53:42.148509 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-10-11 04:53:42.648490501 +0000 UTC m=+143.544723297 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.148607 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:42 crc kubenswrapper[4651]: E1011 04:53:42.148898 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:42.648891001 +0000 UTC m=+143.545123797 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.204807 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-zbqhj"] Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.252829 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:53:42 crc kubenswrapper[4651]: E1011 04:53:42.253222 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:42.753206931 +0000 UTC m=+143.649439727 (durationBeforeRetry 500ms). 
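
For scale, the pod_startup_latency_tracker lines above compute podStartSLOduration as the gap between the pod's creationTimestamp and the first time the kubelet observed it running; image-pull time is deducted when pulls occurred, but both pull timestamps here are the zero time, so nothing is subtracted. Reproducing the router-default number from the logged values:

    package main

    // Reproduces podStartSLOduration=122.969150878 from the log above,
    // assuming no image-pull deduction (both pull timestamps are zero).

    import (
    	"fmt"
    	"time"
    )

    func main() {
    	created, _ := time.Parse(time.RFC3339, "2025-10-11T04:51:39Z")
    	observedRunning, _ := time.Parse(time.RFC3339Nano, "2025-10-11T04:53:41.969150878Z")

    	sloDuration := observedRunning.Sub(created)
    	fmt.Println(sloDuration.Seconds()) // 122.969150878, i.e. 2m2.969150878s
    }

The large value (over two minutes) reflects that this pod was created long before the node finished rebooting, not that the container itself took that long to start.
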
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.349181 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-97x45"] Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.356745 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-xx5gz"] Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.365019 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:42 crc kubenswrapper[4651]: E1011 04:53:42.365477 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:42.865466079 +0000 UTC m=+143.761698875 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.451641 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pkhrx" podStartSLOduration=122.451627443 podStartE2EDuration="2m2.451627443s" podCreationTimestamp="2025-10-11 04:51:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:42.451028928 +0000 UTC m=+143.347261734" watchObservedRunningTime="2025-10-11 04:53:42.451627443 +0000 UTC m=+143.347860239" Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.467922 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:53:42 crc kubenswrapper[4651]: E1011 04:53:42.468120 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:42.968073244 +0000 UTC m=+143.864306040 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.468205 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:42 crc kubenswrapper[4651]: E1011 04:53:42.468681 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:42.968671869 +0000 UTC m=+143.864904665 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.480222 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-8fhsm"] Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.569315 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:53:42 crc kubenswrapper[4651]: E1011 04:53:42.569705 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:43.069686533 +0000 UTC m=+143.965919339 (durationBeforeRetry 500ms). 
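
The alternating UnmountVolume/MountVolume entries for the same PVC come from the volume manager's reconciler (the reconciler_common.go lines above), which continually diffs a desired state of world (volumes that scheduled pods need) against the actual state of world (volumes currently mounted): the PVC must be torn down for the deleted pod 8f668bae-... and staged for the new image-registry pod, and both directions are stuck behind the unregistered CSI driver. A toy version of that diff; worldState and the function names are illustrative:

    package main

    // Toy desired-vs-actual diff mirroring the reconciler entries above.
    // Illustrative types; not the kubelet volume manager's real structures.

    import "fmt"

    type worldState map[string][]string // volume name -> pod UIDs using it

    func contains(s []string, v string) bool {
    	for _, x := range s {
    		if x == v {
    			return true
    		}
    	}
    	return false
    }

    func reconcile(desired, actual worldState) {
    	for vol, pods := range actual { // mounted but no longer wanted -> unmount
    		for _, pod := range pods {
    			if !contains(desired[vol], pod) {
    				fmt.Printf("operationExecutor.UnmountVolume started for volume %q pod %q\n", vol, pod)
    			}
    		}
    	}
    	for vol, pods := range desired { // wanted but not mounted -> mount
    		for _, pod := range pods {
    			if !contains(actual[vol], pod) {
    				fmt.Printf("operationExecutor.MountVolume started for volume %q pod %q\n", vol, pod)
    			}
    		}
    	}
    }

    func main() {
    	pvc := "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8"
    	actual := worldState{pvc: {"8f668bae-612b-4b75-9490-919e737c6a3b"}}
    	desired := worldState{pvc: {"4adc8e3f-786a-4d1b-985b-3f39cf67767a"}}
    	reconcile(desired, actual) // one unmount for the old pod, one mount for the new
    }

Because the reconciler loops roughly every 100ms and the backoff gate blocks each attempt for 500ms, the same pair of messages recurs on a steady cadence until the driver registers.
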
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.576632 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-r4vsk"] Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.620847 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-2mj6l"] Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.628357 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335965-cqjjn"] Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.634258 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2q6fr"] Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.644323 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2f67z" podStartSLOduration=123.644304885 podStartE2EDuration="2m3.644304885s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:42.64332375 +0000 UTC m=+143.539556556" watchObservedRunningTime="2025-10-11 04:53:42.644304885 +0000 UTC m=+143.540537681" Oct 11 04:53:42 crc kubenswrapper[4651]: W1011 04:53:42.655997 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod12479473_09c0_4d87_9075_0b37754123a6.slice/crio-a8cd1760c3561f9e1b6915be089f2a3ac6c12190832615a7f939cf3d8c7ddc6c WatchSource:0}: Error finding container a8cd1760c3561f9e1b6915be089f2a3ac6c12190832615a7f939cf3d8c7ddc6c: Status 404 returned error can't find the container with id a8cd1760c3561f9e1b6915be089f2a3ac6c12190832615a7f939cf3d8c7ddc6c Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.670926 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:42 crc kubenswrapper[4651]: E1011 04:53:42.671337 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:43.171321462 +0000 UTC m=+144.067554248 (durationBeforeRetry 500ms). 
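
The "SyncLoop (PLEG)" lines that follow are the pod lifecycle event generator relaying container-runtime state changes back into the kubelet's sync loop; each event carries the pod UID ("ID"), an event type such as ContainerStarted or ContainerDied, and a container or sandbox ID ("Data"). A toy dispatcher over the same event shape, using values from the migrator pod entry below; the types are illustrative, not kubelet's:

    package main

    // Toy PLEG event dispatch matching the "SyncLoop (PLEG)" log shape.
    // Field and type names are assumptions for illustration.

    import "fmt"

    type PodLifecycleEvent struct {
    	PodID string // pod UID, the "ID" field in the log
    	Type  string // "ContainerStarted" or "ContainerDied"
    	Data  string // container or sandbox ID, the "Data" field
    }

    func handle(ev PodLifecycleEvent) {
    	switch ev.Type {
    	case "ContainerStarted":
    		fmt.Printf("pod %s: container %s started, trigger pod sync\n", ev.PodID, ev.Data)
    	case "ContainerDied":
    		fmt.Printf("pod %s: container %s finished, record exit code and sync\n", ev.PodID, ev.Data)
    	}
    }

    func main() {
    	handle(PodLifecycleEvent{
    		PodID: "6cd78244-1fbc-4c7b-81ee-be4a2e1eda22",
    		Type:  "ContainerStarted",
    		Data:  "dfc9756285d6c0af2755aa6f0c2886eedfb31a1f7ff91be99d21e8b1a9136f03",
    	})
    }
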
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.694200 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7tk5h" event={"ID":"6cd78244-1fbc-4c7b-81ee-be4a2e1eda22","Type":"ContainerStarted","Data":"dfc9756285d6c0af2755aa6f0c2886eedfb31a1f7ff91be99d21e8b1a9136f03"} Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.698550 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-cd92z" event={"ID":"a25ac582-d0a6-4bd7-a9c9-dbed70086212","Type":"ContainerStarted","Data":"9968b94eaba2f738b617766cfc9bae1b86327fdc22fa484823748347655cdb24"} Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.703996 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xqg79" event={"ID":"1f6c3e28-3c8b-41d2-a314-95103ddb7ab4","Type":"ContainerStarted","Data":"82dddd0b7a3c52d08d886c16e5fc7552348f2a4f1b242214c2f273a9a7c5b942"} Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.705519 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-f7bf8"] Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.707651 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hf4jk" event={"ID":"29ad03db-a1b8-4cf1-b603-d9e1e61359db","Type":"ContainerStarted","Data":"0b9310ff2a2b1f7c72e643618d81304df53846b04b7824d7fcb81475a27b1d3e"} Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.710787 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-9qvz2"] Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.719158 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-s9qnd" event={"ID":"5c2e6635-02f1-4869-9d20-7577116611ba","Type":"ContainerStarted","Data":"46161b0d44757d10d3f38d08897cb95d8e7b5f4138ee21892824d549b857ace1"} Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.719340 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-s9qnd" Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.725497 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-2lxzd" event={"ID":"5ad2068c-8217-424f-aa22-dfb57604ea05","Type":"ContainerStarted","Data":"2d17a7eb4c8e34eb9cdbba546d13b43b24947d3daa3aba5aeeb5f121e6b03f20"} Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.728975 4651 generic.go:334] "Generic (PLEG): container finished" podID="49755973-5d14-4c72-9858-7edca1f2c2ee" containerID="d9a0f38f247df1a394d2d2e668c540cd8b5ef2b06892ac7a06a5a7435b715f18" exitCode=0 Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.729037 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" 
event={"ID":"49755973-5d14-4c72-9858-7edca1f2c2ee","Type":"ContainerDied","Data":"d9a0f38f247df1a394d2d2e668c540cd8b5ef2b06892ac7a06a5a7435b715f18"} Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.731348 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2jmxt" event={"ID":"67e0d9bc-63c7-4509-b804-d63705caa189","Type":"ContainerStarted","Data":"302fc9a038e3babb7ecbedf093225597bd12aebe7027dfae6032c1b2c93e7101"} Oct 11 04:53:42 crc kubenswrapper[4651]: W1011 04:53:42.732245 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5b16fb63_ca55_4c40_8d92_21477dd79984.slice/crio-8a7fd0f90269a39b461615adcb7ce3994b2faca56a7a619a8017235871bf89dc WatchSource:0}: Error finding container 8a7fd0f90269a39b461615adcb7ce3994b2faca56a7a619a8017235871bf89dc: Status 404 returned error can't find the container with id 8a7fd0f90269a39b461615adcb7ce3994b2faca56a7a619a8017235871bf89dc Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.733133 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-xx5gz" event={"ID":"c88b0ab7-f277-4d78-937d-9268c3e34eae","Type":"ContainerStarted","Data":"4e025b9e3d4232abcbfa3cfebb255e1b4fa437253bc8415bc3fb1028f0b654d4"} Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.733963 4651 patch_prober.go:28] interesting pod/downloads-7954f5f757-s9qnd container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.734004 4651 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-s9qnd" podUID="5c2e6635-02f1-4869-9d20-7577116611ba" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.736981 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-k85tm" event={"ID":"05f54403-9045-4e0b-94af-636e78ba5c52","Type":"ContainerStarted","Data":"0b98212539c13cefa69e2b3c3eb5d34b8ee161ea5d64f45e76b44f090c36f45e"} Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.737018 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-k85tm" event={"ID":"05f54403-9045-4e0b-94af-636e78ba5c52","Type":"ContainerStarted","Data":"1161d22c5c08f20ca3069e710d972c396d6d165388e8d081a9465e797884521d"} Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.740748 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-thfrv" event={"ID":"d32c16ff-cc90-4759-a695-405d76694b39","Type":"ContainerStarted","Data":"ba12b1dafec5413daa6b239003a1ef2120f705849a3d167612d37259ee420e72"} Oct 11 04:53:42 crc kubenswrapper[4651]: W1011 04:53:42.740997 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf6aa26e9_64fd_4591_9824_bb8b4ddfce0f.slice/crio-5054781e73b57917b8f849f4e27f99d85a9f68c053802f86f64c212c17f629e5 WatchSource:0}: Error finding container 
5054781e73b57917b8f849f4e27f99d85a9f68c053802f86f64c212c17f629e5: Status 404 returned error can't find the container with id 5054781e73b57917b8f849f4e27f99d85a9f68c053802f86f64c212c17f629e5 Oct 11 04:53:42 crc kubenswrapper[4651]: W1011 04:53:42.743316 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod928ecea2_0af7_48bf_a442_ee6c6c86d00c.slice/crio-8bc4a2d425fdced110cc2689ba2dfd55fc3c785783943ea2586a636402adca50 WatchSource:0}: Error finding container 8bc4a2d425fdced110cc2689ba2dfd55fc3c785783943ea2586a636402adca50: Status 404 returned error can't find the container with id 8bc4a2d425fdced110cc2689ba2dfd55fc3c785783943ea2586a636402adca50 Oct 11 04:53:42 crc kubenswrapper[4651]: W1011 04:53:42.744048 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb0ea38e2_3e31_4208_a918_2859626f0048.slice/crio-d9a6987d3d1a73c557c11ad55d2d39de9183a18ea971a29f08992a6997f758e5 WatchSource:0}: Error finding container d9a6987d3d1a73c557c11ad55d2d39de9183a18ea971a29f08992a6997f758e5: Status 404 returned error can't find the container with id d9a6987d3d1a73c557c11ad55d2d39de9183a18ea971a29f08992a6997f758e5 Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.745724 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-2mj56" event={"ID":"b590e4d5-1684-4e2f-b5e9-8fbf00db4546","Type":"ContainerStarted","Data":"f5324512d31e7765ad4f6c16a990dbc208768c4664a10def742b13c8d4eb9f7c"} Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.748015 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-8fhsm" event={"ID":"12479473-09c0-4d87-9075-0b37754123a6","Type":"ContainerStarted","Data":"a8cd1760c3561f9e1b6915be089f2a3ac6c12190832615a7f939cf3d8c7ddc6c"} Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.750099 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-p8tts" event={"ID":"d05dd104-27f8-410b-8a71-68101c58d906","Type":"ContainerStarted","Data":"7bde1846b14dce67500e078b0055438923eff4562e25d8309ed802970b34105a"} Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.753969 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9jbmn" event={"ID":"56d26781-8236-4a07-9dbf-d0b926cba29a","Type":"ContainerStarted","Data":"5c889574b20e879e424296af46860bdbaa9c63266783b3f7e3991765a44e93d3"} Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.756403 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-97fqd"] Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.761757 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4lsqd" event={"ID":"19358ccd-5ef0-4416-81db-347114a9bbe4","Type":"ContainerStarted","Data":"3f393c461a8e7e89bdfbc6860f84ae3a4fd0c1985557e1d9c07bdefd32541d42"} Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.762590 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-w55bl" event={"ID":"da275bab-31cd-4cf4-bb88-820b837416ec","Type":"ContainerStarted","Data":"af267051d282448edae13bccd21c7ff37d18c8834a867aebb22ee254398b3449"} Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 
04:53:42.763741 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-97x45" event={"ID":"3bc42b99-e1e0-4c88-90fd-8ba933095287","Type":"ContainerStarted","Data":"8a95555aa66abf4aae932963ce066f729d58579ef9e0d6a0cece6584c3a5d101"} Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.766470 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ws6wr" event={"ID":"7bafdf97-6219-440a-a3b0-49c55c2a3b5b","Type":"ContainerStarted","Data":"18e615fbb412110665bd63820428882f3372015885e9edaa708b36b337a9d78b"} Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.773286 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6qdd5" event={"ID":"0195bd92-5ff5-4c4b-86a6-360d9620f118","Type":"ContainerStarted","Data":"5aeb04affd845fb29ce12fbc2a58e6f2b43bdd582aabf44f5a73b11dee44c7a7"} Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.774806 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:53:42 crc kubenswrapper[4651]: E1011 04:53:42.774921 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:43.274901563 +0000 UTC m=+144.171134359 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.777600 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:42 crc kubenswrapper[4651]: E1011 04:53:42.777874 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:43.27785681 +0000 UTC m=+144.174089606 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.787656 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" event={"ID":"107de3f1-b5a8-41e4-bb3b-a34e4e916390","Type":"ContainerStarted","Data":"23094e7600c7d63ec080fb45512c04cf3c76f659db2de174e3d425c515f765be"} Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.789768 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jkbfd"] Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.793663 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-27wgh" event={"ID":"f8fd1293-3d68-4dd2-bc12-8f7c02017bcd","Type":"ContainerStarted","Data":"c8afb9465d8378fe19e39f9b0c32ed4b205c9d40014ff57fad09d94a26d9ac2d"} Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.802246 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rgxlz" event={"ID":"391e51dc-8070-40bf-ac61-33c1ef37c72b","Type":"ContainerStarted","Data":"6a4a61d21abe1068ed02bae613485c8c51d2175492c257126fbe2606ce73e4fe"} Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.814619 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-htjqx" event={"ID":"88ee50e1-3036-4557-8dbb-6aefcc8df336","Type":"ContainerStarted","Data":"1e267a76b1ad914969a0095cce7eb0c33cb1bd02df7b914dd1c0c3e7f2119da8"} Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.817906 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-h9rlq" event={"ID":"fc942eea-0c2a-474f-bc24-c6d5fb171c79","Type":"ContainerStarted","Data":"13578dbfc3f302e94f4f3af302f01c6851db78d1c20081c283902bf12bc25b46"} Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.821759 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-pc67v" event={"ID":"ccf042b9-768a-413d-bc29-58ab74c06fc9","Type":"ContainerStarted","Data":"7f446fc7e5c783c26633082ac1f62a5d8e42981388f8754731888287b4d0b2a7"} Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.839224 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-2hhkn" event={"ID":"2797c45d-e1d7-44d7-b936-44048593f540","Type":"ContainerStarted","Data":"7a26ece8fa97124105a03af84610f81b09c42277ba272cb10e79f7159dd4e72a"} Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.846740 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zbqhj" event={"ID":"82548e7e-b445-4506-b42c-c9c620e82267","Type":"ContainerStarted","Data":"51222d56cb4a7956fee36308805085d141839d399b66cdbc3d51ad212954b447"} Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.883002 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:53:42 crc kubenswrapper[4651]: E1011 04:53:42.883259 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:43.383241088 +0000 UTC m=+144.279473874 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.883369 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:42 crc kubenswrapper[4651]: E1011 04:53:42.889079 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:43.38906332 +0000 UTC m=+144.285296206 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.943113 4651 patch_prober.go:28] interesting pod/router-default-5444994796-pxhmv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 04:53:42 crc kubenswrapper[4651]: [-]has-synced failed: reason withheld Oct 11 04:53:42 crc kubenswrapper[4651]: [+]process-running ok Oct 11 04:53:42 crc kubenswrapper[4651]: healthz check failed Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.943168 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pxhmv" podUID="e011176f-c96e-4823-89f6-648d574d1ef4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 04:53:42 crc kubenswrapper[4651]: I1011 04:53:42.985089 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:53:42 crc kubenswrapper[4651]: E1011 04:53:42.986130 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:43.486080689 +0000 UTC m=+144.382313485 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.086603 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:43 crc kubenswrapper[4651]: E1011 04:53:43.087461 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:43.587444951 +0000 UTC m=+144.483677747 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.189296 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:53:43 crc kubenswrapper[4651]: E1011 04:53:43.190319 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:43.690289953 +0000 UTC m=+144.586522749 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.193422 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pkhrx" Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.291492 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:43 crc kubenswrapper[4651]: E1011 04:53:43.291830 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:43.791803569 +0000 UTC m=+144.688036365 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.397110 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:53:43 crc kubenswrapper[4651]: E1011 04:53:43.397262 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:43.897231798 +0000 UTC m=+144.793464594 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.397959 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:43 crc kubenswrapper[4651]: E1011 04:53:43.398744 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:43.898729687 +0000 UTC m=+144.794962483 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.446069 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-thfrv" podStartSLOduration=124.446048915 podStartE2EDuration="2m4.446048915s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:43.445317876 +0000 UTC m=+144.341550682" watchObservedRunningTime="2025-10-11 04:53:43.446048915 +0000 UTC m=+144.342281711" Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.499358 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:53:43 crc kubenswrapper[4651]: E1011 04:53:43.500104 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:44.000087529 +0000 UTC m=+144.896320315 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.534849 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2jmxt" podStartSLOduration=124.534806818 podStartE2EDuration="2m4.534806818s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:43.522224419 +0000 UTC m=+144.418457215" watchObservedRunningTime="2025-10-11 04:53:43.534806818 +0000 UTC m=+144.431039624" Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.552595 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-s9qnd" podStartSLOduration=124.552578803 podStartE2EDuration="2m4.552578803s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:43.549382639 +0000 UTC m=+144.445615435" watchObservedRunningTime="2025-10-11 04:53:43.552578803 +0000 UTC m=+144.448811599" Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.593216 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-ws6wr" podStartSLOduration=124.593197506 podStartE2EDuration="2m4.593197506s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:43.589020267 +0000 UTC m=+144.485253053" watchObservedRunningTime="2025-10-11 04:53:43.593197506 +0000 UTC m=+144.489430302" Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.600926 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:43 crc kubenswrapper[4651]: E1011 04:53:43.601337 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:44.101325799 +0000 UTC m=+144.997558595 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.643167 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-2hhkn" podStartSLOduration=124.643148423 podStartE2EDuration="2m4.643148423s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:43.642921267 +0000 UTC m=+144.539154053" watchObservedRunningTime="2025-10-11 04:53:43.643148423 +0000 UTC m=+144.539381209" Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.689686 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-htjqx" podStartSLOduration=124.68966277 podStartE2EDuration="2m4.68966277s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:43.677728638 +0000 UTC m=+144.573961444" watchObservedRunningTime="2025-10-11 04:53:43.68966277 +0000 UTC m=+144.585895566" Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.702198 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:53:43 crc kubenswrapper[4651]: E1011 04:53:43.702423 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:44.202404094 +0000 UTC m=+145.098636890 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.702653 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:43 crc kubenswrapper[4651]: E1011 04:53:43.703157 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:44.203147583 +0000 UTC m=+145.099380379 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.803658 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:53:43 crc kubenswrapper[4651]: E1011 04:53:43.803780 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:44.303765596 +0000 UTC m=+145.199998392 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.804257 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:43 crc kubenswrapper[4651]: E1011 04:53:43.804617 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:44.304606208 +0000 UTC m=+145.200839004 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.867394 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-97x45" event={"ID":"3bc42b99-e1e0-4c88-90fd-8ba933095287","Type":"ContainerStarted","Data":"afab4202bdc18d5d35a057f2c9b6e5cd5bbb22f43b086f0f5718cbbfeaffa2d3"} Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.885376 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-2lxzd" event={"ID":"5ad2068c-8217-424f-aa22-dfb57604ea05","Type":"ContainerStarted","Data":"642c0c6326bf4dad09f44f0da7312040f3505f87efca3ed9b2015ef727e37e45"} Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.885425 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-9qvz2" event={"ID":"24845824-d21b-4793-8bcf-632ef188076e","Type":"ContainerStarted","Data":"ea17a005867e15d46594877b74ed0871e5955386bf3bf1f04fd1625e32df1788"} Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.885435 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-9qvz2" event={"ID":"24845824-d21b-4793-8bcf-632ef188076e","Type":"ContainerStarted","Data":"2b9624c26a4db4b03f4da587b4ec1f7c2c7de999eaad3844105b787b9486bbb3"} Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.896061 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-97x45" podStartSLOduration=124.896045751 podStartE2EDuration="2m4.896045751s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:43.888929595 +0000 UTC m=+144.785162391" 
watchObservedRunningTime="2025-10-11 04:53:43.896045751 +0000 UTC m=+144.792278547" Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.907307 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:53:43 crc kubenswrapper[4651]: E1011 04:53:43.908180 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:44.408161878 +0000 UTC m=+145.304394674 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.910231 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-9qvz2" podStartSLOduration=5.910211602 podStartE2EDuration="5.910211602s" podCreationTimestamp="2025-10-11 04:53:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:43.907238564 +0000 UTC m=+144.803471370" watchObservedRunningTime="2025-10-11 04:53:43.910211602 +0000 UTC m=+144.806444398" Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.912400 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-mdhlw" event={"ID":"aa722226-d2d2-4122-93fa-1aeee25b7868","Type":"ContainerStarted","Data":"73e794120aa8520e176984ed6ce2a9f457069a8b926e685343574ff6667ca685"} Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.914184 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-mdhlw" Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.917952 4651 patch_prober.go:28] interesting pod/console-operator-58897d9998-mdhlw container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.23:8443/readyz\": dial tcp 10.217.0.23:8443: connect: connection refused" start-of-body= Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.918007 4651 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-mdhlw" podUID="aa722226-d2d2-4122-93fa-1aeee25b7868" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.23:8443/readyz\": dial tcp 10.217.0.23:8443: connect: connection refused" Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.942014 4651 patch_prober.go:28] interesting pod/router-default-5444994796-pxhmv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 04:53:43 crc kubenswrapper[4651]: [-]has-synced 
failed: reason withheld Oct 11 04:53:43 crc kubenswrapper[4651]: [+]process-running ok Oct 11 04:53:43 crc kubenswrapper[4651]: healthz check failed Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.942078 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pxhmv" podUID="e011176f-c96e-4823-89f6-648d574d1ef4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.942377 4651 generic.go:334] "Generic (PLEG): container finished" podID="fc942eea-0c2a-474f-bc24-c6d5fb171c79" containerID="0b04d0ba66239232bf24e386d5c2672cde7eab1b3428632b15eddb24eb79302f" exitCode=0 Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.942484 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-h9rlq" event={"ID":"fc942eea-0c2a-474f-bc24-c6d5fb171c79","Type":"ContainerDied","Data":"0b04d0ba66239232bf24e386d5c2672cde7eab1b3428632b15eddb24eb79302f"} Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.948657 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-mdhlw" podStartSLOduration=124.948639587 podStartE2EDuration="2m4.948639587s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:43.940699489 +0000 UTC m=+144.836932295" watchObservedRunningTime="2025-10-11 04:53:43.948639587 +0000 UTC m=+144.844872383" Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.953697 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" event={"ID":"107de3f1-b5a8-41e4-bb3b-a34e4e916390","Type":"ContainerStarted","Data":"fd951d9b62c7c02273d67aa0f4781137f840877fede418b2fdcd4ccb35e554ea"} Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.956794 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.963417 4651 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-zqjmp container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.16:6443/healthz\": dial tcp 10.217.0.16:6443: connect: connection refused" start-of-body= Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.963460 4651 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" podUID="107de3f1-b5a8-41e4-bb3b-a34e4e916390" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.16:6443/healthz\": dial tcp 10.217.0.16:6443: connect: connection refused" Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.987971 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2q6fr" event={"ID":"928ecea2-0af7-48bf-a442-ee6c6c86d00c","Type":"ContainerStarted","Data":"8bc4a2d425fdced110cc2689ba2dfd55fc3c785783943ea2586a636402adca50"} Oct 11 04:53:43 crc kubenswrapper[4651]: I1011 04:53:43.996336 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" podStartSLOduration=124.996314575 podStartE2EDuration="2m4.996314575s" 
podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:43.995144484 +0000 UTC m=+144.891377310" watchObservedRunningTime="2025-10-11 04:53:43.996314575 +0000 UTC m=+144.892547371" Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.008310 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-2mj56" event={"ID":"b590e4d5-1684-4e2f-b5e9-8fbf00db4546","Type":"ContainerStarted","Data":"e7dde5631c9515a43b077a50e0a283a0b37017c5c77e5ee7b5ef2d150f14e983"} Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.009940 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:44 crc kubenswrapper[4651]: E1011 04:53:44.013381 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:44.513365171 +0000 UTC m=+145.409598077 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.033340 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-p8tts" event={"ID":"d05dd104-27f8-410b-8a71-68101c58d906","Type":"ContainerStarted","Data":"02cb0b695d3184b00d6840a68a83150543c83dbcb1be061b88e2d1cd215d5cca"} Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.043138 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-2mj56" podStartSLOduration=125.043114809 podStartE2EDuration="2m5.043114809s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:44.030038697 +0000 UTC m=+144.926271513" watchObservedRunningTime="2025-10-11 04:53:44.043114809 +0000 UTC m=+144.939347605" Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.105413 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335965-cqjjn" event={"ID":"b0ea38e2-3e31-4208-a918-2859626f0048","Type":"ContainerStarted","Data":"99ab96f0ea2620e7a440e243341cdf40218cfd65fb90631d89fe8938009ebe36"} Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.105789 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335965-cqjjn" event={"ID":"b0ea38e2-3e31-4208-a918-2859626f0048","Type":"ContainerStarted","Data":"d9a6987d3d1a73c557c11ad55d2d39de9183a18ea971a29f08992a6997f758e5"} Oct 11 04:53:44 
crc kubenswrapper[4651]: I1011 04:53:44.112624 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:53:44 crc kubenswrapper[4651]: E1011 04:53:44.112742 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:44.612726391 +0000 UTC m=+145.508959187 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.112990 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.129044 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-cd92z" event={"ID":"a25ac582-d0a6-4bd7-a9c9-dbed70086212","Type":"ContainerStarted","Data":"1edf5f7f4b453b2ff313cb8ab6e5910898f8a8a011c5170d79f1f578e458516e"} Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.130349 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-cd92z" Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.139389 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xqg79" event={"ID":"1f6c3e28-3c8b-41d2-a314-95103ddb7ab4","Type":"ContainerStarted","Data":"b4cbb7b3aec8a1706756fb2a4122c0d152cb2cfb9a1fc65b07ec06957db39f61"} Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.139487 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xqg79" event={"ID":"1f6c3e28-3c8b-41d2-a314-95103ddb7ab4","Type":"ContainerStarted","Data":"57d196ac45b7ff19c82fb6c478bfeafd661d97b12327b1562af0bdcff9b05f76"} Oct 11 04:53:44 crc kubenswrapper[4651]: E1011 04:53:44.152756 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:44.652733078 +0000 UTC m=+145.548965874 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.156445 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29335965-cqjjn" podStartSLOduration=124.156424114 podStartE2EDuration="2m4.156424114s" podCreationTimestamp="2025-10-11 04:51:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:44.13524022 +0000 UTC m=+145.031473036" watchObservedRunningTime="2025-10-11 04:53:44.156424114 +0000 UTC m=+145.052656910"
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.166890 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jkbfd" event={"ID":"1aa4c8c0-ff6e-4683-a554-0286fb970db3","Type":"ContainerStarted","Data":"03edb972d2f987cad9be6a186aeb22814c966de88c0a0db8f6954c25cdda4d96"}
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.174098 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-cd92z" podStartSLOduration=125.174056156 podStartE2EDuration="2m5.174056156s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:44.165774209 +0000 UTC m=+145.062007015" watchObservedRunningTime="2025-10-11 04:53:44.174056156 +0000 UTC m=+145.070288952"
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.174591 4651 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-cd92z container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.31:8080/healthz\": dial tcp 10.217.0.31:8080: connect: connection refused" start-of-body=
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.174653 4651 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-cd92z" podUID="a25ac582-d0a6-4bd7-a9c9-dbed70086212" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.31:8080/healthz\": dial tcp 10.217.0.31:8080: connect: connection refused"
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.189701 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-f7bf8" event={"ID":"5977b2a6-9bef-4215-a7d6-602ea9be37c2","Type":"ContainerStarted","Data":"58a1d5b83b113316b26a77537e016299d1af519559df5ce8fead7a651dd525f9"}
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.203369 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xqg79" podStartSLOduration=125.203332592 podStartE2EDuration="2m5.203332592s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:44.199715317 +0000 UTC m=+145.095948123" watchObservedRunningTime="2025-10-11 04:53:44.203332592 +0000 UTC m=+145.099565398"
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.218243 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 04:53:44 crc kubenswrapper[4651]: E1011 04:53:44.218523 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:44.718466228 +0000 UTC m=+145.614699024 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.218604 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt"
Oct 11 04:53:44 crc kubenswrapper[4651]: E1011 04:53:44.221619 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:44.72159161 +0000 UTC m=+145.617824406 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.225758 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-f7bf8" podStartSLOduration=124.225735368 podStartE2EDuration="2m4.225735368s" podCreationTimestamp="2025-10-11 04:51:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:44.224045494 +0000 UTC m=+145.120278300" watchObservedRunningTime="2025-10-11 04:53:44.225735368 +0000 UTC m=+145.121968164"
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.229597 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs" event={"ID":"f90412cd-2f6a-4322-b3b7-29904de3b09c","Type":"ContainerStarted","Data":"93916b4aaf1d61c16449a49e21bf49dc4a2350591dd69ba8d0f85840eecf0b00"}
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.265939 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hf4jk" event={"ID":"29ad03db-a1b8-4cf1-b603-d9e1e61359db","Type":"ContainerStarted","Data":"9d7ff67f3c86edafea0177d3da34b317e2180d15a894aba55ae8d136d48f21e9"}
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.266881 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hf4jk"
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.296449 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs" podStartSLOduration=124.296432097 podStartE2EDuration="2m4.296432097s" podCreationTimestamp="2025-10-11 04:51:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:44.296381726 +0000 UTC m=+145.192614522" watchObservedRunningTime="2025-10-11 04:53:44.296432097 +0000 UTC m=+145.192664893"
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.306712 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jkbfd" podStartSLOduration=125.306690766 podStartE2EDuration="2m5.306690766s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:44.252430847 +0000 UTC m=+145.148663643" watchObservedRunningTime="2025-10-11 04:53:44.306690766 +0000 UTC m=+145.202923562"
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.308483 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rgxlz" event={"ID":"391e51dc-8070-40bf-ac61-33c1ef37c72b","Type":"ContainerStarted","Data":"ba6983be67f156717147fb631921e6d80a50b8a2dc260f13b27d431c0e031a7c"}
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.320347 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-r4vsk" event={"ID":"5b16fb63-ca55-4c40-8d92-21477dd79984","Type":"ContainerStarted","Data":"7a0bb20a03d27ad78b27340e30259ed9f647f04bded76dd6c95b549fa7434875"}
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.320397 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-r4vsk" event={"ID":"5b16fb63-ca55-4c40-8d92-21477dd79984","Type":"ContainerStarted","Data":"8a7fd0f90269a39b461615adcb7ce3994b2faca56a7a619a8017235871bf89dc"}
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.337294 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hf4jk" podStartSLOduration=124.337268616 podStartE2EDuration="2m4.337268616s" podCreationTimestamp="2025-10-11 04:51:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:44.336552177 +0000 UTC m=+145.232784983" watchObservedRunningTime="2025-10-11 04:53:44.337268616 +0000 UTC m=+145.233501412"
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.349314 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 04:53:44 crc kubenswrapper[4651]: E1011 04:53:44.351028 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:44.851011045 +0000 UTC m=+145.747243841 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.373761 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-2mj6l" event={"ID":"f6aa26e9-64fd-4591-9824-bb8b4ddfce0f","Type":"ContainerStarted","Data":"5054781e73b57917b8f849f4e27f99d85a9f68c053802f86f64c212c17f629e5"}
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.400960 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6qdd5" event={"ID":"0195bd92-5ff5-4c4b-86a6-360d9620f118","Type":"ContainerStarted","Data":"ce5d7935128dfae5b397eeaddc64b5c34f62e3380998645f5bddc5cbd67fee66"}
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.402165 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6qdd5"
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.419922 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4lsqd" event={"ID":"19358ccd-5ef0-4416-81db-347114a9bbe4","Type":"ContainerStarted","Data":"2d817b8e39d7ceac34849d1930075ec72610e022f9dd61b21f545afb01347b33"}
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.434343 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-27wgh" event={"ID":"f8fd1293-3d68-4dd2-bc12-8f7c02017bcd","Type":"ContainerStarted","Data":"781524fea5e3f2d2ce5d085cbae2ba7f1a25ec7c02e5c2d4f72f378eea0c388f"}
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.450292 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9jbmn" event={"ID":"56d26781-8236-4a07-9dbf-d0b926cba29a","Type":"ContainerStarted","Data":"6672b6ba84b653139543e6a0c4d5e0a8c03b473f17fe4fdc1bdcd2cc3d605c56"}
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.455475 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt"
Oct 11 04:53:44 crc kubenswrapper[4651]: E1011 04:53:44.457532 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:44.957517742 +0000 UTC m=+145.853750538 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.466330 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-r4vsk" podStartSLOduration=125.466314983 podStartE2EDuration="2m5.466314983s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:44.452266115 +0000 UTC m=+145.348498911" watchObservedRunningTime="2025-10-11 04:53:44.466314983 +0000 UTC m=+145.362547779"
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.484085 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6qdd5"
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.498917 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7tk5h" event={"ID":"6cd78244-1fbc-4c7b-81ee-be4a2e1eda22","Type":"ContainerStarted","Data":"52a1c4ba48045a861e39c7f6356bdda3eadbf49356705d80a6452a4d5d560ac9"}
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.501412 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-w55bl" event={"ID":"da275bab-31cd-4cf4-bb88-820b837416ec","Type":"ContainerStarted","Data":"b1c26bf5ba96f70939663fab602ef43f38cfdc9332c6b580aee91907f75e4917"}
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.503181 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zbqhj" event={"ID":"82548e7e-b445-4506-b42c-c9c620e82267","Type":"ContainerStarted","Data":"b81d18582715474b6be131ac98cd3f713a503117d153ac59a51d123f89b9bda5"}
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.505328 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-97fqd" event={"ID":"6cfb505f-a515-433b-82ac-792d3b435ce1","Type":"ContainerStarted","Data":"cd644a4462c2ab759f03478b9358738cfa4f60ea26b3a2cafa18cb1d717d7162"}
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.505366 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-97fqd" event={"ID":"6cfb505f-a515-433b-82ac-792d3b435ce1","Type":"ContainerStarted","Data":"83ef6305c19c064a5da2f9ea183c56caabc3203fcd0079b6b148911ac524f6cc"}
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.505892 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-97fqd"
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.524021 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-xx5gz" event={"ID":"c88b0ab7-f277-4d78-937d-9268c3e34eae","Type":"ContainerStarted","Data":"55402cc07582b8ae9c9bb9f7fce469f12c791209e018a4247c4ea1ed0788f97b"}
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.526917 4651 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-97fqd container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.34:8443/healthz\": dial tcp 10.217.0.34:8443: connect: connection refused" start-of-body=
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.526984 4651 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-97fqd" podUID="6cfb505f-a515-433b-82ac-792d3b435ce1" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.34:8443/healthz\": dial tcp 10.217.0.34:8443: connect: connection refused"
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.533207 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-htjqx" event={"ID":"88ee50e1-3036-4557-8dbb-6aefcc8df336","Type":"ContainerStarted","Data":"f12c443298e56096c04ed470695d4f2da670f118b7fa9812f32326bab7556dc8"}
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.553076 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rgxlz" podStartSLOduration=125.553061563 podStartE2EDuration="2m5.553061563s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:44.496975575 +0000 UTC m=+145.393208371" watchObservedRunningTime="2025-10-11 04:53:44.553061563 +0000 UTC m=+145.449294359"
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.553832 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-2mj6l" podStartSLOduration=124.553811612 podStartE2EDuration="2m4.553811612s" podCreationTimestamp="2025-10-11 04:51:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:44.546153532 +0000 UTC m=+145.442386328" watchObservedRunningTime="2025-10-11 04:53:44.553811612 +0000 UTC m=+145.450044408"
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.558420 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 04:53:44 crc kubenswrapper[4651]: E1011 04:53:44.559041 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:45.058974927 +0000 UTC m=+145.955207843 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.559232 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt"
Oct 11 04:53:44 crc kubenswrapper[4651]: E1011 04:53:44.560594 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:45.060574849 +0000 UTC m=+145.956807645 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.568622 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-pc67v" event={"ID":"ccf042b9-768a-413d-bc29-58ab74c06fc9","Type":"ContainerStarted","Data":"6b64d5a6afe3aee87f84ffda80f57bb957ad4c68893ce240e5b13fd953481335"}
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.589315 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4lsqd" podStartSLOduration=125.589298221 podStartE2EDuration="2m5.589298221s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:44.588918331 +0000 UTC m=+145.485151137" watchObservedRunningTime="2025-10-11 04:53:44.589298221 +0000 UTC m=+145.485531017"
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.589413 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-k85tm" event={"ID":"05f54403-9045-4e0b-94af-636e78ba5c52","Type":"ContainerStarted","Data":"6fb3ce273fff1aff480904f296c480e550bb6efa86ce2f74691924a9d696922a"}
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.590466 4651 patch_prober.go:28] interesting pod/downloads-7954f5f757-s9qnd container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body=
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.590500 4651 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-s9qnd" podUID="5c2e6635-02f1-4869-9d20-7577116611ba" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused"
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.590555 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-2hhkn"
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.628123 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9jbmn" podStartSLOduration=125.628104806 podStartE2EDuration="2m5.628104806s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:44.626520075 +0000 UTC m=+145.522752891" watchObservedRunningTime="2025-10-11 04:53:44.628104806 +0000 UTC m=+145.524337602"
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.642643 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-2hhkn"
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.652096 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-6qdd5" podStartSLOduration=124.652080294 podStartE2EDuration="2m4.652080294s" podCreationTimestamp="2025-10-11 04:51:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:44.650065511 +0000 UTC m=+145.546298317" watchObservedRunningTime="2025-10-11 04:53:44.652080294 +0000 UTC m=+145.548313090"
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.661046 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 04:53:44 crc kubenswrapper[4651]: E1011 04:53:44.661813 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:45.161796298 +0000 UTC m=+146.058029094 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.718612 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-27wgh" podStartSLOduration=125.718596264 podStartE2EDuration="2m5.718596264s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:44.67832217 +0000 UTC m=+145.574554966" watchObservedRunningTime="2025-10-11 04:53:44.718596264 +0000 UTC m=+145.614829050"
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.745754 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-k85tm" podStartSLOduration=124.745730234 podStartE2EDuration="2m4.745730234s" podCreationTimestamp="2025-10-11 04:51:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:44.743558278 +0000 UTC m=+145.639791094" watchObservedRunningTime="2025-10-11 04:53:44.745730234 +0000 UTC m=+145.641963030"
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.756738 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-pc67v" podStartSLOduration=125.756712302 podStartE2EDuration="2m5.756712302s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:44.716882499 +0000 UTC m=+145.613115315" watchObservedRunningTime="2025-10-11 04:53:44.756712302 +0000 UTC m=+145.652945098"
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.762956 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt"
Oct 11 04:53:44 crc kubenswrapper[4651]: E1011 04:53:44.766596 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:45.26658269 +0000 UTC m=+146.162815486 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.817634 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-97fqd" podStartSLOduration=124.817607795 podStartE2EDuration="2m4.817607795s" podCreationTimestamp="2025-10-11 04:51:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:44.786095121 +0000 UTC m=+145.682327927" watchObservedRunningTime="2025-10-11 04:53:44.817607795 +0000 UTC m=+145.713840611"
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.839375 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zbqhj" podStartSLOduration=125.839352714 podStartE2EDuration="2m5.839352714s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:44.834912628 +0000 UTC m=+145.731145444" watchObservedRunningTime="2025-10-11 04:53:44.839352714 +0000 UTC m=+145.735585520"
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.869361 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 04:53:44 crc kubenswrapper[4651]: E1011 04:53:44.869693 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:45.369677458 +0000 UTC m=+146.265910254 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.869948 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7tk5h" podStartSLOduration=125.869925774 podStartE2EDuration="2m5.869925774s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:44.866449853 +0000 UTC m=+145.762682659" watchObservedRunningTime="2025-10-11 04:53:44.869925774 +0000 UTC m=+145.766158570"
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.941888 4651 patch_prober.go:28] interesting pod/router-default-5444994796-pxhmv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 11 04:53:44 crc kubenswrapper[4651]: [-]has-synced failed: reason withheld
Oct 11 04:53:44 crc kubenswrapper[4651]: [+]process-running ok
Oct 11 04:53:44 crc kubenswrapper[4651]: healthz check failed
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.941964 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pxhmv" podUID="e011176f-c96e-4823-89f6-648d574d1ef4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 11 04:53:44 crc kubenswrapper[4651]: I1011 04:53:44.971921 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt"
Oct 11 04:53:44 crc kubenswrapper[4651]: E1011 04:53:44.972404 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:45.472386475 +0000 UTC m=+146.368619281 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.073892 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 04:53:45 crc kubenswrapper[4651]: E1011 04:53:45.074714 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:45.574694193 +0000 UTC m=+146.470926989 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.176317 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt"
Oct 11 04:53:45 crc kubenswrapper[4651]: E1011 04:53:45.176702 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:45.676686092 +0000 UTC m=+146.572918888 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.266998 4651 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-hf4jk container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.39:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.267059 4651 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hf4jk" podUID="29ad03db-a1b8-4cf1-b603-d9e1e61359db" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.39:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.277022 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 04:53:45 crc kubenswrapper[4651]: E1011 04:53:45.277345 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:45.777330005 +0000 UTC m=+146.673562801 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.378973 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt"
Oct 11 04:53:45 crc kubenswrapper[4651]: E1011 04:53:45.379380 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:45.879361885 +0000 UTC m=+146.775594741 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.415679 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs"
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.415966 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs"
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.479622 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 04:53:45 crc kubenswrapper[4651]: E1011 04:53:45.480331 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:45.980309677 +0000 UTC m=+146.876542473 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.483060 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs"
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.582264 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt"
Oct 11 04:53:45 crc kubenswrapper[4651]: E1011 04:53:45.582675 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:46.082658975 +0000 UTC m=+146.978891771 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.594057 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-w55bl" podStartSLOduration=7.594040653 podStartE2EDuration="7.594040653s" podCreationTimestamp="2025-10-11 04:53:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:44.897759293 +0000 UTC m=+145.793992099" watchObservedRunningTime="2025-10-11 04:53:45.594040653 +0000 UTC m=+146.490273449"
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.595175 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7tk5h" event={"ID":"6cd78244-1fbc-4c7b-81ee-be4a2e1eda22","Type":"ContainerStarted","Data":"ad343ff990db92c1bf2cb519b7aafe0ba9a1c4f9361912d8d2d15ddf4fee1c7c"}
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.596558 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-2lxzd" event={"ID":"5ad2068c-8217-424f-aa22-dfb57604ea05","Type":"ContainerStarted","Data":"e7c65074fd0421ba580012d2e808d93ba024f4ada3d884530cec98d5a4ee0a1d"}
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.598078 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-h9rlq" event={"ID":"fc942eea-0c2a-474f-bc24-c6d5fb171c79","Type":"ContainerStarted","Data":"5975b174cf3ea3561a0684e72021605fd8a73a15248cdbe6774183767856bae7"}
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.598230 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-h9rlq"
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.601228 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-p8tts" event={"ID":"d05dd104-27f8-410b-8a71-68101c58d906","Type":"ContainerStarted","Data":"befe79069151f0a2083797825da5f4521cbbf258b4624fca0facae0c5329d9ce"}
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.604232 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" event={"ID":"49755973-5d14-4c72-9858-7edca1f2c2ee","Type":"ContainerStarted","Data":"6ff7dd3d45ec3f418a278addced773fea6cff9ed12ccf22fb4119412f26e0b24"}
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.604267 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" event={"ID":"49755973-5d14-4c72-9858-7edca1f2c2ee","Type":"ContainerStarted","Data":"8fd904927080b737abbdcb11b20ea0113b1c97d31988fa7540f4ae6aef60dbe2"}
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.606314 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2q6fr" event={"ID":"928ecea2-0af7-48bf-a442-ee6c6c86d00c","Type":"ContainerStarted","Data":"a422c5779bc09b977b208b756fdff62719a942644baa50ef3012d2a4ab393fe2"}
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.608022 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-2mj6l" event={"ID":"f6aa26e9-64fd-4591-9824-bb8b4ddfce0f","Type":"ContainerStarted","Data":"79b70b0360718866d812c7e2234b13ccc60ea250513553dbc91dec842452753d"}
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.610170 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-xx5gz" event={"ID":"c88b0ab7-f277-4d78-937d-9268c3e34eae","Type":"ContainerStarted","Data":"39d47780b8e2a57360bc5c65955f110a5e915ed84c9a42af61fe78cacf691b03"}
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.610228 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-xx5gz"
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.611068 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-8fhsm" event={"ID":"12479473-09c0-4d87-9075-0b37754123a6","Type":"ContainerStarted","Data":"b0c9e967de0967d6a7fad04eb55adf07f6a6a3de994add04544510aa7ec28cae"}
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.612421 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jkbfd" event={"ID":"1aa4c8c0-ff6e-4683-a554-0286fb970db3","Type":"ContainerStarted","Data":"4d02ad55731958bccb8e73d13c90c8461c7a4704e171270286fd39a35256b786"}
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.614020 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-r4vsk" event={"ID":"5b16fb63-ca55-4c40-8d92-21477dd79984","Type":"ContainerStarted","Data":"ee0f18630781eeda6a750e6511acf8fe87c32bef3028e6131c515ac2993ea889"}
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.615829 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-f7bf8" event={"ID":"5977b2a6-9bef-4215-a7d6-602ea9be37c2","Type":"ContainerStarted","Data":"eb5cb585695f050fda56ac3cdf4b65da79a0022811d4cb018c106508e8313f95"}
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.617542 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zbqhj" event={"ID":"82548e7e-b445-4506-b42c-c9c620e82267","Type":"ContainerStarted","Data":"c3bd983970861616a282117297319739298131b17b779d3c8e854d048b5fb87b"}
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.619124 4651 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-cd92z container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.31:8080/healthz\": dial tcp 10.217.0.31:8080: connect: connection refused" start-of-body=
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.619161 4651 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-cd92z" podUID="a25ac582-d0a6-4bd7-a9c9-dbed70086212" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.31:8080/healthz\": dial tcp 10.217.0.31:8080: connect: connection refused"
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.619747 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-k85tm"
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.626048 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hf4jk"
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.626733 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xzfcs"
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.631777 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-97fqd"
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.682795 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 04:53:45 crc kubenswrapper[4651]: E1011 04:53:45.682960 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:46.182930029 +0000 UTC m=+147.079162825 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.683112 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt"
Oct 11 04:53:45 crc kubenswrapper[4651]: E1011 04:53:45.683443 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:46.183434972 +0000 UTC m=+147.079667848 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.717940 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-n4hfz"
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.718008 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-n4hfz"
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.719174 4651 patch_prober.go:28] interesting pod/apiserver-76f77b778f-n4hfz container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="Get \"https://10.217.0.10:8443/livez\": dial tcp 10.217.0.10:8443: connect: connection refused" start-of-body=
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.719231 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" podUID="49755973-5d14-4c72-9858-7edca1f2c2ee" containerName="openshift-apiserver" probeResult="failure" output="Get \"https://10.217.0.10:8443/livez\": dial tcp 10.217.0.10:8443: connect: connection refused"
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.784389 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 04:53:45 crc kubenswrapper[4651]: E1011 04:53:45.784591 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:46.284556838 +0000 UTC m=+147.180789634 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.791152 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt"
Oct 11 04:53:45 crc kubenswrapper[4651]: E1011 04:53:45.791771 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:46.291755297 +0000 UTC m=+147.187988093 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.843243 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-2lxzd" podStartSLOduration=126.843216033 podStartE2EDuration="2m6.843216033s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:45.707238535 +0000 UTC m=+146.603471331" watchObservedRunningTime="2025-10-11 04:53:45.843216033 +0000 UTC m=+146.739448829"
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.910372 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 04:53:45 crc kubenswrapper[4651]: E1011 04:53:45.910802 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:46.410785932 +0000 UTC m=+147.307018728 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.944301 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-p8tts" podStartSLOduration=126.944286538 podStartE2EDuration="2m6.944286538s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:45.867216871 +0000 UTC m=+146.763449667" watchObservedRunningTime="2025-10-11 04:53:45.944286538 +0000 UTC m=+146.840519334"
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.949054 4651 patch_prober.go:28] interesting pod/router-default-5444994796-pxhmv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 11 04:53:45 crc kubenswrapper[4651]: [-]has-synced failed: reason withheld
Oct 11 04:53:45 crc kubenswrapper[4651]: [+]process-running ok
Oct 11 04:53:45 crc kubenswrapper[4651]: healthz check failed
Oct 11 04:53:45 crc kubenswrapper[4651]: I1011 04:53:45.949118 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pxhmv" podUID="e011176f-c96e-4823-89f6-648d574d1ef4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.003807 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp"
Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.014641 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt"
Oct 11 04:53:46 crc kubenswrapper[4651]: E1011 04:53:46.014975 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:46.514963158 +0000 UTC m=+147.411195954 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.048665 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vtcqh"]
Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.058077 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vtcqh"
Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.064515 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.090737 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-2q6fr" podStartSLOduration=127.09071702 podStartE2EDuration="2m7.09071702s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:46.056166136 +0000 UTC m=+146.952398932" watchObservedRunningTime="2025-10-11 04:53:46.09071702 +0000 UTC m=+146.986949816"
Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.091652 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vtcqh"]
Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.119524 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.119707 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad64884c-e97d-4dc2-8f86-a44c537f7068-catalog-content\") pod \"certified-operators-vtcqh\" (UID: \"ad64884c-e97d-4dc2-8f86-a44c537f7068\") " pod="openshift-marketplace/certified-operators-vtcqh"
Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.119737 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52sjt\" (UniqueName: \"kubernetes.io/projected/ad64884c-e97d-4dc2-8f86-a44c537f7068-kube-api-access-52sjt\") pod \"certified-operators-vtcqh\" (UID: \"ad64884c-e97d-4dc2-8f86-a44c537f7068\") " pod="openshift-marketplace/certified-operators-vtcqh"
Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.119886 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad64884c-e97d-4dc2-8f86-a44c537f7068-utilities\") pod \"certified-operators-vtcqh\" (UID: \"ad64884c-e97d-4dc2-8f86-a44c537f7068\") " pod="openshift-marketplace/certified-operators-vtcqh"
Oct 11 04:53:46 crc kubenswrapper[4651]: E1011 04:53:46.120057 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:46.620041027 +0000 UTC m=+147.516273823 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.182466 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6gt5n"]
Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.183668 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6gt5n"
Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.196226 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6gt5n"]
Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.196450 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.196898 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-mdhlw"
Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.218295 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-h9rlq" podStartSLOduration=127.218277248 podStartE2EDuration="2m7.218277248s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:46.216990964 +0000 UTC m=+147.113223770" watchObservedRunningTime="2025-10-11 04:53:46.218277248 +0000 UTC m=+147.114510044"
Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.228414 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-spgvj\" (UniqueName: \"kubernetes.io/projected/24573bf5-c576-4e14-b3ac-f33e6ca99af3-kube-api-access-spgvj\") pod \"community-operators-6gt5n\" (UID: \"24573bf5-c576-4e14-b3ac-f33e6ca99af3\") " pod="openshift-marketplace/community-operators-6gt5n"
Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.228484 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad64884c-e97d-4dc2-8f86-a44c537f7068-catalog-content\") pod \"certified-operators-vtcqh\" (UID: \"ad64884c-e97d-4dc2-8f86-a44c537f7068\") " pod="openshift-marketplace/certified-operators-vtcqh"
Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.228502 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24573bf5-c576-4e14-b3ac-f33e6ca99af3-catalog-content\") pod \"community-operators-6gt5n\" (UID: \"24573bf5-c576-4e14-b3ac-f33e6ca99af3\") " pod="openshift-marketplace/community-operators-6gt5n"
Oct 11 04:53:46 crc kubenswrapper[4651]: I1011
04:53:46.228544 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52sjt\" (UniqueName: \"kubernetes.io/projected/ad64884c-e97d-4dc2-8f86-a44c537f7068-kube-api-access-52sjt\") pod \"certified-operators-vtcqh\" (UID: \"ad64884c-e97d-4dc2-8f86-a44c537f7068\") " pod="openshift-marketplace/certified-operators-vtcqh" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.228592 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.228769 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad64884c-e97d-4dc2-8f86-a44c537f7068-utilities\") pod \"certified-operators-vtcqh\" (UID: \"ad64884c-e97d-4dc2-8f86-a44c537f7068\") " pod="openshift-marketplace/certified-operators-vtcqh" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.228795 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24573bf5-c576-4e14-b3ac-f33e6ca99af3-utilities\") pod \"community-operators-6gt5n\" (UID: \"24573bf5-c576-4e14-b3ac-f33e6ca99af3\") " pod="openshift-marketplace/community-operators-6gt5n" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.229420 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad64884c-e97d-4dc2-8f86-a44c537f7068-catalog-content\") pod \"certified-operators-vtcqh\" (UID: \"ad64884c-e97d-4dc2-8f86-a44c537f7068\") " pod="openshift-marketplace/certified-operators-vtcqh" Oct 11 04:53:46 crc kubenswrapper[4651]: E1011 04:53:46.230123 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:46.730109608 +0000 UTC m=+147.626342404 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.230613 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad64884c-e97d-4dc2-8f86-a44c537f7068-utilities\") pod \"certified-operators-vtcqh\" (UID: \"ad64884c-e97d-4dc2-8f86-a44c537f7068\") " pod="openshift-marketplace/certified-operators-vtcqh" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.313991 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52sjt\" (UniqueName: \"kubernetes.io/projected/ad64884c-e97d-4dc2-8f86-a44c537f7068-kube-api-access-52sjt\") pod \"certified-operators-vtcqh\" (UID: \"ad64884c-e97d-4dc2-8f86-a44c537f7068\") " pod="openshift-marketplace/certified-operators-vtcqh" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.314956 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.315004 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.330822 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" podStartSLOduration=127.330801853 podStartE2EDuration="2m7.330801853s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:46.330555266 +0000 UTC m=+147.226788072" watchObservedRunningTime="2025-10-11 04:53:46.330801853 +0000 UTC m=+147.227034649" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.332147 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.332329 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24573bf5-c576-4e14-b3ac-f33e6ca99af3-utilities\") pod \"community-operators-6gt5n\" (UID: \"24573bf5-c576-4e14-b3ac-f33e6ca99af3\") " pod="openshift-marketplace/community-operators-6gt5n" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.332394 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-spgvj\" (UniqueName: 
\"kubernetes.io/projected/24573bf5-c576-4e14-b3ac-f33e6ca99af3-kube-api-access-spgvj\") pod \"community-operators-6gt5n\" (UID: \"24573bf5-c576-4e14-b3ac-f33e6ca99af3\") " pod="openshift-marketplace/community-operators-6gt5n" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.332416 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24573bf5-c576-4e14-b3ac-f33e6ca99af3-catalog-content\") pod \"community-operators-6gt5n\" (UID: \"24573bf5-c576-4e14-b3ac-f33e6ca99af3\") " pod="openshift-marketplace/community-operators-6gt5n" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.332917 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24573bf5-c576-4e14-b3ac-f33e6ca99af3-catalog-content\") pod \"community-operators-6gt5n\" (UID: \"24573bf5-c576-4e14-b3ac-f33e6ca99af3\") " pod="openshift-marketplace/community-operators-6gt5n" Oct 11 04:53:46 crc kubenswrapper[4651]: E1011 04:53:46.333022 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:46.83300117 +0000 UTC m=+147.729233956 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.333243 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24573bf5-c576-4e14-b3ac-f33e6ca99af3-utilities\") pod \"community-operators-6gt5n\" (UID: \"24573bf5-c576-4e14-b3ac-f33e6ca99af3\") " pod="openshift-marketplace/community-operators-6gt5n" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.374740 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-xx5gz" podStartSLOduration=8.374724792 podStartE2EDuration="8.374724792s" podCreationTimestamp="2025-10-11 04:53:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:46.373931901 +0000 UTC m=+147.270164707" watchObservedRunningTime="2025-10-11 04:53:46.374724792 +0000 UTC m=+147.270957588" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.376179 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vtcqh" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.395459 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-bv8hd"] Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.402000 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-bv8hd" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.407289 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-spgvj\" (UniqueName: \"kubernetes.io/projected/24573bf5-c576-4e14-b3ac-f33e6ca99af3-kube-api-access-spgvj\") pod \"community-operators-6gt5n\" (UID: \"24573bf5-c576-4e14-b3ac-f33e6ca99af3\") " pod="openshift-marketplace/community-operators-6gt5n" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.430654 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bv8hd"] Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.433486 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:46 crc kubenswrapper[4651]: E1011 04:53:46.433967 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:46.933956692 +0000 UTC m=+147.830189488 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.524324 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6gt5n" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.559687 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.559903 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb3ff40a-c2bf-42d3-920b-b94751bed645-utilities\") pod \"certified-operators-bv8hd\" (UID: \"fb3ff40a-c2bf-42d3-920b-b94751bed645\") " pod="openshift-marketplace/certified-operators-bv8hd" Oct 11 04:53:46 crc kubenswrapper[4651]: E1011 04:53:46.559972 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:47.059930548 +0000 UTC m=+147.956163334 (durationBeforeRetry 500ms). 
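The pod_startup_latency_tracker entries interleaved here compute podStartSLOduration as the observed-running time minus podCreationTimestamp: for dns-default-xx5gz above, 04:53:38 to 04:53:46.37 yields the reported ~8.37s, while the two-minute figures for the operator pods measure from their 04:51:39 creation. A small sketch of that arithmetic, assuming only that the timestamps follow Go's default time formatting (which these do):

package main

import (
	"fmt"
	"time"
)

func main() {
	// Layout of Go's default time.Time formatting, which the
	// kubelet's timestamps in this log follow.
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

	created, err := time.Parse(layout, "2025-10-11 04:53:38 +0000 UTC") // podCreationTimestamp
	if err != nil {
		panic(err)
	}
	observed, err := time.Parse(layout, "2025-10-11 04:53:46.374724792 +0000 UTC") // watchObservedRunningTime
	if err != nil {
		panic(err)
	}

	// podStartSLOduration is simply observed-running time minus creation time.
	fmt.Printf("podStartSLOduration=%.9f\n", observed.Sub(created).Seconds()) // 8.374724792
}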
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.560262 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g74tz\" (UniqueName: \"kubernetes.io/projected/fb3ff40a-c2bf-42d3-920b-b94751bed645-kube-api-access-g74tz\") pod \"certified-operators-bv8hd\" (UID: \"fb3ff40a-c2bf-42d3-920b-b94751bed645\") " pod="openshift-marketplace/certified-operators-bv8hd" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.560332 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb3ff40a-c2bf-42d3-920b-b94751bed645-catalog-content\") pod \"certified-operators-bv8hd\" (UID: \"fb3ff40a-c2bf-42d3-920b-b94751bed645\") " pod="openshift-marketplace/certified-operators-bv8hd" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.560372 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:46 crc kubenswrapper[4651]: E1011 04:53:46.560729 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:47.060719269 +0000 UTC m=+147.956952065 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.609936 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-7d6h5"] Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.612070 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7d6h5" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.662018 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7d6h5"] Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.666647 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.666848 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd02099e-6a9d-425a-ac83-897f19a1007d-catalog-content\") pod \"community-operators-7d6h5\" (UID: \"dd02099e-6a9d-425a-ac83-897f19a1007d\") " pod="openshift-marketplace/community-operators-7d6h5" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.666874 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd02099e-6a9d-425a-ac83-897f19a1007d-utilities\") pod \"community-operators-7d6h5\" (UID: \"dd02099e-6a9d-425a-ac83-897f19a1007d\") " pod="openshift-marketplace/community-operators-7d6h5" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.666930 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g74tz\" (UniqueName: \"kubernetes.io/projected/fb3ff40a-c2bf-42d3-920b-b94751bed645-kube-api-access-g74tz\") pod \"certified-operators-bv8hd\" (UID: \"fb3ff40a-c2bf-42d3-920b-b94751bed645\") " pod="openshift-marketplace/certified-operators-bv8hd" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.666959 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb3ff40a-c2bf-42d3-920b-b94751bed645-catalog-content\") pod \"certified-operators-bv8hd\" (UID: \"fb3ff40a-c2bf-42d3-920b-b94751bed645\") " pod="openshift-marketplace/certified-operators-bv8hd" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.666991 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb3ff40a-c2bf-42d3-920b-b94751bed645-utilities\") pod \"certified-operators-bv8hd\" (UID: \"fb3ff40a-c2bf-42d3-920b-b94751bed645\") " pod="openshift-marketplace/certified-operators-bv8hd" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.667047 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnndv\" (UniqueName: \"kubernetes.io/projected/dd02099e-6a9d-425a-ac83-897f19a1007d-kube-api-access-rnndv\") pod \"community-operators-7d6h5\" (UID: \"dd02099e-6a9d-425a-ac83-897f19a1007d\") " pod="openshift-marketplace/community-operators-7d6h5" Oct 11 04:53:46 crc kubenswrapper[4651]: E1011 04:53:46.667155 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:47.167140664 +0000 UTC m=+148.063373460 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.667741 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb3ff40a-c2bf-42d3-920b-b94751bed645-catalog-content\") pod \"certified-operators-bv8hd\" (UID: \"fb3ff40a-c2bf-42d3-920b-b94751bed645\") " pod="openshift-marketplace/certified-operators-bv8hd" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.667970 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb3ff40a-c2bf-42d3-920b-b94751bed645-utilities\") pod \"certified-operators-bv8hd\" (UID: \"fb3ff40a-c2bf-42d3-920b-b94751bed645\") " pod="openshift-marketplace/certified-operators-bv8hd" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.688049 4651 generic.go:334] "Generic (PLEG): container finished" podID="b0ea38e2-3e31-4208-a918-2859626f0048" containerID="99ab96f0ea2620e7a440e243341cdf40218cfd65fb90631d89fe8938009ebe36" exitCode=0 Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.688324 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335965-cqjjn" event={"ID":"b0ea38e2-3e31-4208-a918-2859626f0048","Type":"ContainerDied","Data":"99ab96f0ea2620e7a440e243341cdf40218cfd65fb90631d89fe8938009ebe36"} Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.699452 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-8fhsm" event={"ID":"12479473-09c0-4d87-9075-0b37754123a6","Type":"ContainerStarted","Data":"df4a1c77efbfe7770c0cdd246e70041ebd29c5547c06c422dde24d8ffe9bc4a6"} Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.701449 4651 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-cd92z container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.31:8080/healthz\": dial tcp 10.217.0.31:8080: connect: connection refused" start-of-body= Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.707047 4651 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-cd92z" podUID="a25ac582-d0a6-4bd7-a9c9-dbed70086212" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.31:8080/healthz\": dial tcp 10.217.0.31:8080: connect: connection refused" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.718262 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g74tz\" (UniqueName: \"kubernetes.io/projected/fb3ff40a-c2bf-42d3-920b-b94751bed645-kube-api-access-g74tz\") pod \"certified-operators-bv8hd\" (UID: \"fb3ff40a-c2bf-42d3-920b-b94751bed645\") " pod="openshift-marketplace/certified-operators-bv8hd" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.771631 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnndv\" (UniqueName: 
\"kubernetes.io/projected/dd02099e-6a9d-425a-ac83-897f19a1007d-kube-api-access-rnndv\") pod \"community-operators-7d6h5\" (UID: \"dd02099e-6a9d-425a-ac83-897f19a1007d\") " pod="openshift-marketplace/community-operators-7d6h5" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.771861 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd02099e-6a9d-425a-ac83-897f19a1007d-catalog-content\") pod \"community-operators-7d6h5\" (UID: \"dd02099e-6a9d-425a-ac83-897f19a1007d\") " pod="openshift-marketplace/community-operators-7d6h5" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.771963 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd02099e-6a9d-425a-ac83-897f19a1007d-utilities\") pod \"community-operators-7d6h5\" (UID: \"dd02099e-6a9d-425a-ac83-897f19a1007d\") " pod="openshift-marketplace/community-operators-7d6h5" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.772357 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:46 crc kubenswrapper[4651]: E1011 04:53:46.784237 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:47.284218558 +0000 UTC m=+148.180451464 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.785436 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd02099e-6a9d-425a-ac83-897f19a1007d-utilities\") pod \"community-operators-7d6h5\" (UID: \"dd02099e-6a9d-425a-ac83-897f19a1007d\") " pod="openshift-marketplace/community-operators-7d6h5" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.785908 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd02099e-6a9d-425a-ac83-897f19a1007d-catalog-content\") pod \"community-operators-7d6h5\" (UID: \"dd02099e-6a9d-425a-ac83-897f19a1007d\") " pod="openshift-marketplace/community-operators-7d6h5" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.846232 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-bv8hd" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.860004 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rnndv\" (UniqueName: \"kubernetes.io/projected/dd02099e-6a9d-425a-ac83-897f19a1007d-kube-api-access-rnndv\") pod \"community-operators-7d6h5\" (UID: \"dd02099e-6a9d-425a-ac83-897f19a1007d\") " pod="openshift-marketplace/community-operators-7d6h5" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.877454 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:53:46 crc kubenswrapper[4651]: E1011 04:53:46.878027 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:47.378012032 +0000 UTC m=+148.274244828 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.950186 4651 patch_prober.go:28] interesting pod/router-default-5444994796-pxhmv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 04:53:46 crc kubenswrapper[4651]: [-]has-synced failed: reason withheld Oct 11 04:53:46 crc kubenswrapper[4651]: [+]process-running ok Oct 11 04:53:46 crc kubenswrapper[4651]: healthz check failed Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.950238 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pxhmv" podUID="e011176f-c96e-4823-89f6-648d574d1ef4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 04:53:46 crc kubenswrapper[4651]: I1011 04:53:46.984632 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:46 crc kubenswrapper[4651]: E1011 04:53:46.985407 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:47.485394542 +0000 UTC m=+148.381627328 (durationBeforeRetry 500ms). 
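The router-default probe failures recorded at 04:53:45.949 and 04:53:46.950 show the aggregated healthz convention: each sub-check is prefixed [+] (ok) or [-] (failed) — here [-]backend-http, [-]has-synced, [+]process-running — and any failed check makes the endpoint return HTTP 500, which the kubelet's prober then records as a startup-probe failure. A simplified model of such an HTTP probe is sketched below; the URL is a placeholder, and the 200-399 success range matches how the kubelet classifies HTTP probe results.

package main

import (
	"fmt"
	"io"
	"net/http"
)

// probe models a kubelet-style HTTP check: a status in 200-399 is
// success, anything else is failure, and the start of the response
// body is kept for the failure message.
func probe(url string) (ok bool, code int, startOfBody string) {
	resp, err := http.Get(url)
	if err != nil {
		return false, 0, err.Error()
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(io.LimitReader(resp.Body, 256))
	return resp.StatusCode >= 200 && resp.StatusCode < 400, resp.StatusCode, string(body)
}

func main() {
	// Placeholder URL; the router's real startup probe hits its own healthz port.
	if ok, code, body := probe("http://127.0.0.1:8080/healthz"); !ok {
		fmt.Printf("HTTP probe failed with statuscode: %d start-of-body=%q\n", code, body)
	} else {
		fmt.Println("probe ok")
	}
}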
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.034217 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7d6h5" Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.088419 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:53:47 crc kubenswrapper[4651]: E1011 04:53:47.088719 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:47.588703455 +0000 UTC m=+148.484936251 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.189406 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:47 crc kubenswrapper[4651]: E1011 04:53:47.189694 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:47.689684168 +0000 UTC m=+148.585916954 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.213542 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6gt5n"] Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.230616 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vtcqh"] Oct 11 04:53:47 crc kubenswrapper[4651]: W1011 04:53:47.248369 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod24573bf5_c576_4e14_b3ac_f33e6ca99af3.slice/crio-76fa2e7548c10b5504dbb316b86be8c9354fb7e138be4d809a0bffe89c5d0b64 WatchSource:0}: Error finding container 76fa2e7548c10b5504dbb316b86be8c9354fb7e138be4d809a0bffe89c5d0b64: Status 404 returned error can't find the container with id 76fa2e7548c10b5504dbb316b86be8c9354fb7e138be4d809a0bffe89c5d0b64 Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.296386 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:53:47 crc kubenswrapper[4651]: E1011 04:53:47.296530 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:47.796506243 +0000 UTC m=+148.692739039 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.296967 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:47 crc kubenswrapper[4651]: E1011 04:53:47.297260 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:47.797252603 +0000 UTC m=+148.693485389 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.350471 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bv8hd"] Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.399264 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:53:47 crc kubenswrapper[4651]: E1011 04:53:47.399710 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:47.899694753 +0000 UTC m=+148.795927539 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.416929 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-h9rlq" Oct 11 04:53:47 crc kubenswrapper[4651]: W1011 04:53:47.421020 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfb3ff40a_c2bf_42d3_920b_b94751bed645.slice/crio-153d669a81ada5d1a8cc827685e7bb67edc17018c253b78e33fcb2a0deae0f97 WatchSource:0}: Error finding container 153d669a81ada5d1a8cc827685e7bb67edc17018c253b78e33fcb2a0deae0f97: Status 404 returned error can't find the container with id 153d669a81ada5d1a8cc827685e7bb67edc17018c253b78e33fcb2a0deae0f97 Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.510137 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:47 crc kubenswrapper[4651]: E1011 04:53:47.510547 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:48.010530644 +0000 UTC m=+148.906763440 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.617231 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:53:47 crc kubenswrapper[4651]: E1011 04:53:47.617949 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:48.117932324 +0000 UTC m=+149.014165120 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.674393 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7d6h5"] Oct 11 04:53:47 crc kubenswrapper[4651]: W1011 04:53:47.699977 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddd02099e_6a9d_425a_ac83_897f19a1007d.slice/crio-4044e2137ee1086e952b483ffe0ef91b105650ff160800178e29b06ef7fe5e8f WatchSource:0}: Error finding container 4044e2137ee1086e952b483ffe0ef91b105650ff160800178e29b06ef7fe5e8f: Status 404 returned error can't find the container with id 4044e2137ee1086e952b483ffe0ef91b105650ff160800178e29b06ef7fe5e8f Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.717213 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7d6h5" event={"ID":"dd02099e-6a9d-425a-ac83-897f19a1007d","Type":"ContainerStarted","Data":"4044e2137ee1086e952b483ffe0ef91b105650ff160800178e29b06ef7fe5e8f"} Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.722354 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.722417 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.722462 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:53:47 crc kubenswrapper[4651]: E1011 04:53:47.723712 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:48.223699322 +0000 UTC m=+149.119932118 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.732794 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vtcqh" event={"ID":"ad64884c-e97d-4dc2-8f86-a44c537f7068","Type":"ContainerStarted","Data":"755ac326c480dbfff8005d26f64ee1f01fad12a3bb64333a6e02077ba553f067"} Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.737269 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.746459 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6gt5n" event={"ID":"24573bf5-c576-4e14-b3ac-f33e6ca99af3","Type":"ContainerStarted","Data":"76fa2e7548c10b5504dbb316b86be8c9354fb7e138be4d809a0bffe89c5d0b64"} Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.748077 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bv8hd" event={"ID":"fb3ff40a-c2bf-42d3-920b-b94751bed645","Type":"ContainerStarted","Data":"153d669a81ada5d1a8cc827685e7bb67edc17018c253b78e33fcb2a0deae0f97"} Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.753036 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-8fhsm" event={"ID":"12479473-09c0-4d87-9075-0b37754123a6","Type":"ContainerStarted","Data":"b271df39ddd9359b2b22edf9e884faaafd20ab48089b0065a118551ccf185bb6"} Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.823720 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:53:47 crc kubenswrapper[4651]: E1011 04:53:47.824039 4651 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:48.324022907 +0000 UTC m=+149.220255703 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.824255 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.824450 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.824530 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:53:47 crc kubenswrapper[4651]: E1011 04:53:47.828746 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:48.328734721 +0000 UTC m=+149.224967517 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.833046 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.833624 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.853468 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.925500 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:53:47 crc kubenswrapper[4651]: E1011 04:53:47.925682 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:48.425660176 +0000 UTC m=+149.321892972 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.926019 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:47 crc kubenswrapper[4651]: E1011 04:53:47.926314 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 04:53:48.426306673 +0000 UTC m=+149.322539469 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-v5ktt" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.938036 4651 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.939222 4651 patch_prober.go:28] interesting pod/router-default-5444994796-pxhmv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 04:53:47 crc kubenswrapper[4651]: [-]has-synced failed: reason withheld Oct 11 04:53:47 crc kubenswrapper[4651]: [+]process-running ok Oct 11 04:53:47 crc kubenswrapper[4651]: healthz check failed Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.939262 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pxhmv" podUID="e011176f-c96e-4823-89f6-648d574d1ef4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.966920 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335965-cqjjn" Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.974603 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-g9dtw"] Oct 11 04:53:47 crc kubenswrapper[4651]: E1011 04:53:47.974828 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0ea38e2-3e31-4208-a918-2859626f0048" containerName="collect-profiles" Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.974864 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0ea38e2-3e31-4208-a918-2859626f0048" containerName="collect-profiles" Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.974956 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0ea38e2-3e31-4208-a918-2859626f0048" containerName="collect-profiles" Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.975612 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g9dtw" Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.977914 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.988951 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 04:53:47 crc kubenswrapper[4651]: I1011 04:53:47.998956 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g9dtw"] Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.030203 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b0ea38e2-3e31-4208-a918-2859626f0048-secret-volume\") pod \"b0ea38e2-3e31-4208-a918-2859626f0048\" (UID: \"b0ea38e2-3e31-4208-a918-2859626f0048\") " Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.030359 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b0ea38e2-3e31-4208-a918-2859626f0048-config-volume\") pod \"b0ea38e2-3e31-4208-a918-2859626f0048\" (UID: \"b0ea38e2-3e31-4208-a918-2859626f0048\") " Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.030417 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r98nv\" (UniqueName: \"kubernetes.io/projected/b0ea38e2-3e31-4208-a918-2859626f0048-kube-api-access-r98nv\") pod \"b0ea38e2-3e31-4208-a918-2859626f0048\" (UID: \"b0ea38e2-3e31-4208-a918-2859626f0048\") " Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.030549 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.030743 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c-utilities\") pod \"redhat-marketplace-g9dtw\" (UID: \"b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c\") " pod="openshift-marketplace/redhat-marketplace-g9dtw" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 
04:53:48.030779 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64r9s\" (UniqueName: \"kubernetes.io/projected/b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c-kube-api-access-64r9s\") pod \"redhat-marketplace-g9dtw\" (UID: \"b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c\") " pod="openshift-marketplace/redhat-marketplace-g9dtw" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.030861 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c-catalog-content\") pod \"redhat-marketplace-g9dtw\" (UID: \"b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c\") " pod="openshift-marketplace/redhat-marketplace-g9dtw" Oct 11 04:53:48 crc kubenswrapper[4651]: E1011 04:53:48.031084 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 04:53:48.531065314 +0000 UTC m=+149.427298110 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.033542 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b0ea38e2-3e31-4208-a918-2859626f0048-config-volume" (OuterVolumeSpecName: "config-volume") pod "b0ea38e2-3e31-4208-a918-2859626f0048" (UID: "b0ea38e2-3e31-4208-a918-2859626f0048"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.037558 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0ea38e2-3e31-4208-a918-2859626f0048-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b0ea38e2-3e31-4208-a918-2859626f0048" (UID: "b0ea38e2-3e31-4208-a918-2859626f0048"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.038281 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0ea38e2-3e31-4208-a918-2859626f0048-kube-api-access-r98nv" (OuterVolumeSpecName: "kube-api-access-r98nv") pod "b0ea38e2-3e31-4208-a918-2859626f0048" (UID: "b0ea38e2-3e31-4208-a918-2859626f0048"). InnerVolumeSpecName "kube-api-access-r98nv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.053158 4651 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-10-11T04:53:47.938060251Z","Handler":null,"Name":""} Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.065647 4651 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.065690 4651 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.093791 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.101374 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.133179 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.133238 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c-utilities\") pod \"redhat-marketplace-g9dtw\" (UID: \"b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c\") " pod="openshift-marketplace/redhat-marketplace-g9dtw" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.133266 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64r9s\" (UniqueName: \"kubernetes.io/projected/b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c-kube-api-access-64r9s\") pod \"redhat-marketplace-g9dtw\" (UID: \"b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c\") " pod="openshift-marketplace/redhat-marketplace-g9dtw" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.133332 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c-catalog-content\") pod \"redhat-marketplace-g9dtw\" (UID: \"b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c\") " pod="openshift-marketplace/redhat-marketplace-g9dtw" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.133404 4651 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b0ea38e2-3e31-4208-a918-2859626f0048-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.133418 4651 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b0ea38e2-3e31-4208-a918-2859626f0048-config-volume\") on node \"crc\" DevicePath \"\"" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.133432 4651 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-r98nv\" (UniqueName: \"kubernetes.io/projected/b0ea38e2-3e31-4208-a918-2859626f0048-kube-api-access-r98nv\") on node \"crc\" DevicePath \"\"" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.133885 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c-catalog-content\") pod \"redhat-marketplace-g9dtw\" (UID: \"b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c\") " pod="openshift-marketplace/redhat-marketplace-g9dtw" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.134493 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c-utilities\") pod \"redhat-marketplace-g9dtw\" (UID: \"b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c\") " pod="openshift-marketplace/redhat-marketplace-g9dtw" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.137895 4651 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.137938 4651 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.152246 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64r9s\" (UniqueName: \"kubernetes.io/projected/b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c-kube-api-access-64r9s\") pod \"redhat-marketplace-g9dtw\" (UID: \"b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c\") " pod="openshift-marketplace/redhat-marketplace-g9dtw" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.249564 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-v5ktt\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.310564 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g9dtw" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.341353 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.369626 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kpkh2"] Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.373358 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.379872 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.391957 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kpkh2" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.396741 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kpkh2"] Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.450524 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrz7h\" (UniqueName: \"kubernetes.io/projected/dd3f200d-51eb-4758-b64a-10e9c8e36b65-kube-api-access-zrz7h\") pod \"redhat-marketplace-kpkh2\" (UID: \"dd3f200d-51eb-4758-b64a-10e9c8e36b65\") " pod="openshift-marketplace/redhat-marketplace-kpkh2" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.450583 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd3f200d-51eb-4758-b64a-10e9c8e36b65-utilities\") pod \"redhat-marketplace-kpkh2\" (UID: \"dd3f200d-51eb-4758-b64a-10e9c8e36b65\") " pod="openshift-marketplace/redhat-marketplace-kpkh2" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.450613 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd3f200d-51eb-4758-b64a-10e9c8e36b65-catalog-content\") pod \"redhat-marketplace-kpkh2\" (UID: \"dd3f200d-51eb-4758-b64a-10e9c8e36b65\") " pod="openshift-marketplace/redhat-marketplace-kpkh2" Oct 11 04:53:48 crc kubenswrapper[4651]: W1011 04:53:48.495189 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-9636fabcce56c5c1d3af80f2858b611b1d1946daff0672966fc40a20ba114597 WatchSource:0}: Error finding container 9636fabcce56c5c1d3af80f2858b611b1d1946daff0672966fc40a20ba114597: Status 404 returned error can't find the container with id 9636fabcce56c5c1d3af80f2858b611b1d1946daff0672966fc40a20ba114597 Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 
04:53:48.552417 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd3f200d-51eb-4758-b64a-10e9c8e36b65-catalog-content\") pod \"redhat-marketplace-kpkh2\" (UID: \"dd3f200d-51eb-4758-b64a-10e9c8e36b65\") " pod="openshift-marketplace/redhat-marketplace-kpkh2" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.552489 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrz7h\" (UniqueName: \"kubernetes.io/projected/dd3f200d-51eb-4758-b64a-10e9c8e36b65-kube-api-access-zrz7h\") pod \"redhat-marketplace-kpkh2\" (UID: \"dd3f200d-51eb-4758-b64a-10e9c8e36b65\") " pod="openshift-marketplace/redhat-marketplace-kpkh2" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.552541 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd3f200d-51eb-4758-b64a-10e9c8e36b65-utilities\") pod \"redhat-marketplace-kpkh2\" (UID: \"dd3f200d-51eb-4758-b64a-10e9c8e36b65\") " pod="openshift-marketplace/redhat-marketplace-kpkh2" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.553037 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd3f200d-51eb-4758-b64a-10e9c8e36b65-utilities\") pod \"redhat-marketplace-kpkh2\" (UID: \"dd3f200d-51eb-4758-b64a-10e9c8e36b65\") " pod="openshift-marketplace/redhat-marketplace-kpkh2" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.554351 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd3f200d-51eb-4758-b64a-10e9c8e36b65-catalog-content\") pod \"redhat-marketplace-kpkh2\" (UID: \"dd3f200d-51eb-4758-b64a-10e9c8e36b65\") " pod="openshift-marketplace/redhat-marketplace-kpkh2" Oct 11 04:53:48 crc kubenswrapper[4651]: W1011 04:53:48.554752 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-c0633a26658cc5ab0ea2694f5584ef35acffce0d317a027e45ef45286b17a0f6 WatchSource:0}: Error finding container c0633a26658cc5ab0ea2694f5584ef35acffce0d317a027e45ef45286b17a0f6: Status 404 returned error can't find the container with id c0633a26658cc5ab0ea2694f5584ef35acffce0d317a027e45ef45286b17a0f6 Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.574686 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrz7h\" (UniqueName: \"kubernetes.io/projected/dd3f200d-51eb-4758-b64a-10e9c8e36b65-kube-api-access-zrz7h\") pod \"redhat-marketplace-kpkh2\" (UID: \"dd3f200d-51eb-4758-b64a-10e9c8e36b65\") " pod="openshift-marketplace/redhat-marketplace-kpkh2" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.620735 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g9dtw"] Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.713851 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kpkh2" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.714295 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-v5ktt"] Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.768435 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"e63fd2f4ee2bc6f9c6175b1d999d1456437d1987705522e04cda37b078a86816"} Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.768728 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"9636fabcce56c5c1d3af80f2858b611b1d1946daff0672966fc40a20ba114597"} Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.770263 4651 generic.go:334] "Generic (PLEG): container finished" podID="ad64884c-e97d-4dc2-8f86-a44c537f7068" containerID="8deb186874738aed7467d65fe7c04b36559ac552546fe6e7458ca26340b5f490" exitCode=0 Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.770315 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vtcqh" event={"ID":"ad64884c-e97d-4dc2-8f86-a44c537f7068","Type":"ContainerDied","Data":"8deb186874738aed7467d65fe7c04b36559ac552546fe6e7458ca26340b5f490"} Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.772270 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g9dtw" event={"ID":"b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c","Type":"ContainerStarted","Data":"25863fc04b9afcdccf244ecc5619709e1eff7d454370eec9ad19898afced76c1"} Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.772666 4651 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.775096 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335965-cqjjn" event={"ID":"b0ea38e2-3e31-4208-a918-2859626f0048","Type":"ContainerDied","Data":"d9a6987d3d1a73c557c11ad55d2d39de9183a18ea971a29f08992a6997f758e5"} Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.775151 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d9a6987d3d1a73c557c11ad55d2d39de9183a18ea971a29f08992a6997f758e5" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.775267 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335965-cqjjn" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.790412 4651 generic.go:334] "Generic (PLEG): container finished" podID="24573bf5-c576-4e14-b3ac-f33e6ca99af3" containerID="bb7e3d9d5d320cd868b383aa48ffd3078155e046637ba5f3c5d5d16f284000a2" exitCode=0 Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.790479 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6gt5n" event={"ID":"24573bf5-c576-4e14-b3ac-f33e6ca99af3","Type":"ContainerDied","Data":"bb7e3d9d5d320cd868b383aa48ffd3078155e046637ba5f3c5d5d16f284000a2"} Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.813746 4651 generic.go:334] "Generic (PLEG): container finished" podID="fb3ff40a-c2bf-42d3-920b-b94751bed645" containerID="20f812c2f87b0f7207eef434f0662b0e724a6bc91ca50b8c2b6054ea22b74e83" exitCode=0 Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.813845 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bv8hd" event={"ID":"fb3ff40a-c2bf-42d3-920b-b94751bed645","Type":"ContainerDied","Data":"20f812c2f87b0f7207eef434f0662b0e724a6bc91ca50b8c2b6054ea22b74e83"} Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.854023 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-8fhsm" event={"ID":"12479473-09c0-4d87-9075-0b37754123a6","Type":"ContainerStarted","Data":"a66f68879fda13276172b48a89d45f434fcc8b2efd10d20ee678654bbeac2890"} Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.864085 4651 generic.go:334] "Generic (PLEG): container finished" podID="dd02099e-6a9d-425a-ac83-897f19a1007d" containerID="b4c156891aa54bf33596d476df49e8120124dd8d3766d4ae4de35f78db9415ab" exitCode=0 Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.864961 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7d6h5" event={"ID":"dd02099e-6a9d-425a-ac83-897f19a1007d","Type":"ContainerDied","Data":"b4c156891aa54bf33596d476df49e8120124dd8d3766d4ae4de35f78db9415ab"} Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.867509 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" event={"ID":"4adc8e3f-786a-4d1b-985b-3f39cf67767a","Type":"ContainerStarted","Data":"44de1d63fda97bbeeeb7f56514e89de6c106c87ee188e1587f68f1f2a536bf94"} Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.888562 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-8fhsm" podStartSLOduration=10.888546583 podStartE2EDuration="10.888546583s" podCreationTimestamp="2025-10-11 04:53:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:48.887930547 +0000 UTC m=+149.784163343" watchObservedRunningTime="2025-10-11 04:53:48.888546583 +0000 UTC m=+149.784779379" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.888601 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"c93e7270a8a15b2b2bbc0c5883bcf017f8157d3ee57924807ddd8481912ffb70"} Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.888638 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"c0633a26658cc5ab0ea2694f5584ef35acffce0d317a027e45ef45286b17a0f6"} Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.902140 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"36dbcb32d1eacd0042aaf651beffdb1ebad0309c949f79e1d7251803f0679a14"} Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.902202 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"f6a53ae5de1607e31bb2a669b20666a86c2dd550faf3efec3c5bc25d0b12bbc5"} Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.902859 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.938113 4651 patch_prober.go:28] interesting pod/router-default-5444994796-pxhmv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 04:53:48 crc kubenswrapper[4651]: [-]has-synced failed: reason withheld Oct 11 04:53:48 crc kubenswrapper[4651]: [+]process-running ok Oct 11 04:53:48 crc kubenswrapper[4651]: healthz check failed Oct 11 04:53:48 crc kubenswrapper[4651]: I1011 04:53:48.938172 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pxhmv" podUID="e011176f-c96e-4823-89f6-648d574d1ef4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.019059 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kpkh2"] Oct 11 04:53:49 crc kubenswrapper[4651]: W1011 04:53:49.033799 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddd3f200d_51eb_4758_b64a_10e9c8e36b65.slice/crio-d77f68057e9284c471605711818dd6093d36979c971e741949be4e2e36622688 WatchSource:0}: Error finding container d77f68057e9284c471605711818dd6093d36979c971e741949be4e2e36622688: Status 404 returned error can't find the container with id d77f68057e9284c471605711818dd6093d36979c971e741949be4e2e36622688 Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.360329 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-drmcl"] Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.361373 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-drmcl" Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.363143 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.373349 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-drmcl"] Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.464667 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac326965-3277-44c2-bd27-773586999e23-utilities\") pod \"redhat-operators-drmcl\" (UID: \"ac326965-3277-44c2-bd27-773586999e23\") " pod="openshift-marketplace/redhat-operators-drmcl" Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.464715 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krssl\" (UniqueName: \"kubernetes.io/projected/ac326965-3277-44c2-bd27-773586999e23-kube-api-access-krssl\") pod \"redhat-operators-drmcl\" (UID: \"ac326965-3277-44c2-bd27-773586999e23\") " pod="openshift-marketplace/redhat-operators-drmcl" Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.464769 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac326965-3277-44c2-bd27-773586999e23-catalog-content\") pod \"redhat-operators-drmcl\" (UID: \"ac326965-3277-44c2-bd27-773586999e23\") " pod="openshift-marketplace/redhat-operators-drmcl" Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.566663 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac326965-3277-44c2-bd27-773586999e23-catalog-content\") pod \"redhat-operators-drmcl\" (UID: \"ac326965-3277-44c2-bd27-773586999e23\") " pod="openshift-marketplace/redhat-operators-drmcl" Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.566744 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac326965-3277-44c2-bd27-773586999e23-utilities\") pod \"redhat-operators-drmcl\" (UID: \"ac326965-3277-44c2-bd27-773586999e23\") " pod="openshift-marketplace/redhat-operators-drmcl" Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.566775 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-krssl\" (UniqueName: \"kubernetes.io/projected/ac326965-3277-44c2-bd27-773586999e23-kube-api-access-krssl\") pod \"redhat-operators-drmcl\" (UID: \"ac326965-3277-44c2-bd27-773586999e23\") " pod="openshift-marketplace/redhat-operators-drmcl" Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.567256 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac326965-3277-44c2-bd27-773586999e23-utilities\") pod \"redhat-operators-drmcl\" (UID: \"ac326965-3277-44c2-bd27-773586999e23\") " pod="openshift-marketplace/redhat-operators-drmcl" Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.568299 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac326965-3277-44c2-bd27-773586999e23-catalog-content\") pod \"redhat-operators-drmcl\" (UID: \"ac326965-3277-44c2-bd27-773586999e23\") " 
pod="openshift-marketplace/redhat-operators-drmcl" Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.597459 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-krssl\" (UniqueName: \"kubernetes.io/projected/ac326965-3277-44c2-bd27-773586999e23-kube-api-access-krssl\") pod \"redhat-operators-drmcl\" (UID: \"ac326965-3277-44c2-bd27-773586999e23\") " pod="openshift-marketplace/redhat-operators-drmcl" Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.684458 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-drmcl" Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.772971 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5cw4f"] Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.774359 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5cw4f" Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.777451 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5cw4f"] Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.870290 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa37954c-dd94-44fc-86e6-8fb23c429af1-catalog-content\") pod \"redhat-operators-5cw4f\" (UID: \"aa37954c-dd94-44fc-86e6-8fb23c429af1\") " pod="openshift-marketplace/redhat-operators-5cw4f" Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.870330 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8npx\" (UniqueName: \"kubernetes.io/projected/aa37954c-dd94-44fc-86e6-8fb23c429af1-kube-api-access-l8npx\") pod \"redhat-operators-5cw4f\" (UID: \"aa37954c-dd94-44fc-86e6-8fb23c429af1\") " pod="openshift-marketplace/redhat-operators-5cw4f" Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.870378 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa37954c-dd94-44fc-86e6-8fb23c429af1-utilities\") pod \"redhat-operators-5cw4f\" (UID: \"aa37954c-dd94-44fc-86e6-8fb23c429af1\") " pod="openshift-marketplace/redhat-operators-5cw4f" Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.882464 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.939466 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.940517 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.945765 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.955091 4651 patch_prober.go:28] interesting pod/router-default-5444994796-pxhmv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 04:53:49 crc kubenswrapper[4651]: [-]has-synced failed: reason withheld Oct 11 04:53:49 crc kubenswrapper[4651]: [+]process-running ok Oct 11 04:53:49 crc kubenswrapper[4651]: healthz check failed Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.955145 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pxhmv" podUID="e011176f-c96e-4823-89f6-648d574d1ef4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.955255 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.955702 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.968706 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" event={"ID":"4adc8e3f-786a-4d1b-985b-3f39cf67767a","Type":"ContainerStarted","Data":"1a9e22811661cdbe2e5722a41440d79a8b4bc220ab96da3b1dcba7c13cb56bbb"} Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.969624 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.971706 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa37954c-dd94-44fc-86e6-8fb23c429af1-catalog-content\") pod \"redhat-operators-5cw4f\" (UID: \"aa37954c-dd94-44fc-86e6-8fb23c429af1\") " pod="openshift-marketplace/redhat-operators-5cw4f" Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.971735 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8npx\" (UniqueName: \"kubernetes.io/projected/aa37954c-dd94-44fc-86e6-8fb23c429af1-kube-api-access-l8npx\") pod \"redhat-operators-5cw4f\" (UID: \"aa37954c-dd94-44fc-86e6-8fb23c429af1\") " pod="openshift-marketplace/redhat-operators-5cw4f" Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.971760 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9febfd36-febe-430d-9f97-9e3411b7155d-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"9febfd36-febe-430d-9f97-9e3411b7155d\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.971802 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa37954c-dd94-44fc-86e6-8fb23c429af1-utilities\") pod \"redhat-operators-5cw4f\" (UID: \"aa37954c-dd94-44fc-86e6-8fb23c429af1\") " 
pod="openshift-marketplace/redhat-operators-5cw4f" Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.971904 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9febfd36-febe-430d-9f97-9e3411b7155d-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"9febfd36-febe-430d-9f97-9e3411b7155d\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.972686 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa37954c-dd94-44fc-86e6-8fb23c429af1-catalog-content\") pod \"redhat-operators-5cw4f\" (UID: \"aa37954c-dd94-44fc-86e6-8fb23c429af1\") " pod="openshift-marketplace/redhat-operators-5cw4f" Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.973257 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa37954c-dd94-44fc-86e6-8fb23c429af1-utilities\") pod \"redhat-operators-5cw4f\" (UID: \"aa37954c-dd94-44fc-86e6-8fb23c429af1\") " pod="openshift-marketplace/redhat-operators-5cw4f" Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.974906 4651 generic.go:334] "Generic (PLEG): container finished" podID="b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c" containerID="f204696e045b631c78308169b24f5d53b75615ed904b895ac72aa66bd28fb71e" exitCode=0 Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.975012 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g9dtw" event={"ID":"b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c","Type":"ContainerDied","Data":"f204696e045b631c78308169b24f5d53b75615ed904b895ac72aa66bd28fb71e"} Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.981331 4651 generic.go:334] "Generic (PLEG): container finished" podID="dd3f200d-51eb-4758-b64a-10e9c8e36b65" containerID="950d2a73ced9f02ee969d66bfe89d6f830cb3dd486e0f5f9480bc6845c63120b" exitCode=0 Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.982217 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kpkh2" event={"ID":"dd3f200d-51eb-4758-b64a-10e9c8e36b65","Type":"ContainerDied","Data":"950d2a73ced9f02ee969d66bfe89d6f830cb3dd486e0f5f9480bc6845c63120b"} Oct 11 04:53:49 crc kubenswrapper[4651]: I1011 04:53:49.982244 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kpkh2" event={"ID":"dd3f200d-51eb-4758-b64a-10e9c8e36b65","Type":"ContainerStarted","Data":"d77f68057e9284c471605711818dd6093d36979c971e741949be4e2e36622688"} Oct 11 04:53:50 crc kubenswrapper[4651]: I1011 04:53:49.997465 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8npx\" (UniqueName: \"kubernetes.io/projected/aa37954c-dd94-44fc-86e6-8fb23c429af1-kube-api-access-l8npx\") pod \"redhat-operators-5cw4f\" (UID: \"aa37954c-dd94-44fc-86e6-8fb23c429af1\") " pod="openshift-marketplace/redhat-operators-5cw4f" Oct 11 04:53:50 crc kubenswrapper[4651]: I1011 04:53:50.012231 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" podStartSLOduration=131.012212927 podStartE2EDuration="2m11.012212927s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 
04:53:49.992133222 +0000 UTC m=+150.888366048" watchObservedRunningTime="2025-10-11 04:53:50.012212927 +0000 UTC m=+150.908445723" Oct 11 04:53:50 crc kubenswrapper[4651]: I1011 04:53:50.059614 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-drmcl"] Oct 11 04:53:50 crc kubenswrapper[4651]: I1011 04:53:50.072612 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9febfd36-febe-430d-9f97-9e3411b7155d-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"9febfd36-febe-430d-9f97-9e3411b7155d\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 11 04:53:50 crc kubenswrapper[4651]: I1011 04:53:50.072719 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9febfd36-febe-430d-9f97-9e3411b7155d-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"9febfd36-febe-430d-9f97-9e3411b7155d\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 11 04:53:50 crc kubenswrapper[4651]: W1011 04:53:50.075138 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podac326965_3277_44c2_bd27_773586999e23.slice/crio-454c3d927fbdfb6dad62e6141067385a7f807195b23c4c262365a88b5cfb6bbf WatchSource:0}: Error finding container 454c3d927fbdfb6dad62e6141067385a7f807195b23c4c262365a88b5cfb6bbf: Status 404 returned error can't find the container with id 454c3d927fbdfb6dad62e6141067385a7f807195b23c4c262365a88b5cfb6bbf Oct 11 04:53:50 crc kubenswrapper[4651]: I1011 04:53:50.075399 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9febfd36-febe-430d-9f97-9e3411b7155d-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"9febfd36-febe-430d-9f97-9e3411b7155d\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 11 04:53:50 crc kubenswrapper[4651]: I1011 04:53:50.092095 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9febfd36-febe-430d-9f97-9e3411b7155d-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"9febfd36-febe-430d-9f97-9e3411b7155d\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 11 04:53:50 crc kubenswrapper[4651]: I1011 04:53:50.098081 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5cw4f" Oct 11 04:53:50 crc kubenswrapper[4651]: I1011 04:53:50.293341 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 11 04:53:50 crc kubenswrapper[4651]: I1011 04:53:50.548568 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 11 04:53:50 crc kubenswrapper[4651]: I1011 04:53:50.560758 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5cw4f"] Oct 11 04:53:50 crc kubenswrapper[4651]: I1011 04:53:50.671515 4651 patch_prober.go:28] interesting pod/downloads-7954f5f757-s9qnd container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Oct 11 04:53:50 crc kubenswrapper[4651]: I1011 04:53:50.671561 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-s9qnd" podUID="5c2e6635-02f1-4869-9d20-7577116611ba" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Oct 11 04:53:50 crc kubenswrapper[4651]: I1011 04:53:50.671653 4651 patch_prober.go:28] interesting pod/downloads-7954f5f757-s9qnd container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Oct 11 04:53:50 crc kubenswrapper[4651]: I1011 04:53:50.671703 4651 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-s9qnd" podUID="5c2e6635-02f1-4869-9d20-7577116611ba" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Oct 11 04:53:50 crc kubenswrapper[4651]: I1011 04:53:50.724953 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:50 crc kubenswrapper[4651]: I1011 04:53:50.730401 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-n4hfz" Oct 11 04:53:50 crc kubenswrapper[4651]: I1011 04:53:50.933221 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-pxhmv" Oct 11 04:53:50 crc kubenswrapper[4651]: I1011 04:53:50.940270 4651 patch_prober.go:28] interesting pod/router-default-5444994796-pxhmv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 04:53:50 crc kubenswrapper[4651]: [-]has-synced failed: reason withheld Oct 11 04:53:50 crc kubenswrapper[4651]: [+]process-running ok Oct 11 04:53:50 crc kubenswrapper[4651]: healthz check failed Oct 11 04:53:50 crc kubenswrapper[4651]: I1011 04:53:50.940328 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pxhmv" podUID="e011176f-c96e-4823-89f6-648d574d1ef4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 04:53:50 crc kubenswrapper[4651]: E1011 04:53:50.995679 4651 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaa37954c_dd94_44fc_86e6_8fb23c429af1.slice/crio-conmon-194b8b052443fb4027a49077094ad9abc23b6de22400e4571828348c5f09099a.scope\": RecentStats: unable to find data in memory cache]" Oct 11 04:53:50 crc kubenswrapper[4651]: I1011 04:53:50.997568 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"9febfd36-febe-430d-9f97-9e3411b7155d","Type":"ContainerStarted","Data":"a6bcaf2c39eaf5e3ea55663d60f4f844c33b4b21b3da7c473e97d7f8457882c0"} Oct 11 04:53:50 crc kubenswrapper[4651]: I1011 04:53:50.999595 4651 generic.go:334] "Generic (PLEG): container finished" podID="aa37954c-dd94-44fc-86e6-8fb23c429af1" containerID="194b8b052443fb4027a49077094ad9abc23b6de22400e4571828348c5f09099a" exitCode=0 Oct 11 04:53:50 crc kubenswrapper[4651]: I1011 04:53:50.999662 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5cw4f" event={"ID":"aa37954c-dd94-44fc-86e6-8fb23c429af1","Type":"ContainerDied","Data":"194b8b052443fb4027a49077094ad9abc23b6de22400e4571828348c5f09099a"} Oct 11 04:53:50 crc kubenswrapper[4651]: I1011 04:53:50.999724 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5cw4f" event={"ID":"aa37954c-dd94-44fc-86e6-8fb23c429af1","Type":"ContainerStarted","Data":"afa84b6a8ac2bee8921945efc894fc3f37ebc7ba463c2955c633b7d6a35dcf9e"} Oct 11 04:53:51 crc kubenswrapper[4651]: I1011 04:53:51.003184 4651 generic.go:334] "Generic (PLEG): container finished" podID="ac326965-3277-44c2-bd27-773586999e23" containerID="5a8411747437478e0d9d62a2ae3f9e0eff4c20cf706b627e858a1b350da8b7ca" exitCode=0 Oct 11 04:53:51 crc kubenswrapper[4651]: I1011 04:53:51.004130 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-drmcl" event={"ID":"ac326965-3277-44c2-bd27-773586999e23","Type":"ContainerDied","Data":"5a8411747437478e0d9d62a2ae3f9e0eff4c20cf706b627e858a1b350da8b7ca"} Oct 11 04:53:51 crc kubenswrapper[4651]: I1011 04:53:51.004151 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-drmcl" event={"ID":"ac326965-3277-44c2-bd27-773586999e23","Type":"ContainerStarted","Data":"454c3d927fbdfb6dad62e6141067385a7f807195b23c4c262365a88b5cfb6bbf"} Oct 11 04:53:51 crc kubenswrapper[4651]: I1011 04:53:51.085171 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-pc67v" Oct 11 04:53:51 crc kubenswrapper[4651]: I1011 04:53:51.085285 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-pc67v" Oct 11 04:53:51 crc kubenswrapper[4651]: I1011 04:53:51.086419 4651 patch_prober.go:28] interesting pod/console-f9d7485db-pc67v container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.13:8443/health\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body= Oct 11 04:53:51 crc kubenswrapper[4651]: I1011 04:53:51.086673 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-pc67v" podUID="ccf042b9-768a-413d-bc29-58ab74c06fc9" containerName="console" probeResult="failure" output="Get \"https://10.217.0.13:8443/health\": dial tcp 10.217.0.13:8443: connect: connection refused" Oct 11 04:53:51 crc kubenswrapper[4651]: I1011 04:53:51.119126 4651 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 11 04:53:51 crc kubenswrapper[4651]: I1011 04:53:51.119998 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 11 04:53:51 crc kubenswrapper[4651]: I1011 04:53:51.125313 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Oct 11 04:53:51 crc kubenswrapper[4651]: I1011 04:53:51.125805 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Oct 11 04:53:51 crc kubenswrapper[4651]: I1011 04:53:51.126814 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 11 04:53:51 crc kubenswrapper[4651]: I1011 04:53:51.294015 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f02eb5ea-3d54-4eb5-b302-5d1a92d44921-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"f02eb5ea-3d54-4eb5-b302-5d1a92d44921\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 11 04:53:51 crc kubenswrapper[4651]: I1011 04:53:51.294060 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f02eb5ea-3d54-4eb5-b302-5d1a92d44921-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"f02eb5ea-3d54-4eb5-b302-5d1a92d44921\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 11 04:53:51 crc kubenswrapper[4651]: I1011 04:53:51.395716 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f02eb5ea-3d54-4eb5-b302-5d1a92d44921-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"f02eb5ea-3d54-4eb5-b302-5d1a92d44921\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 11 04:53:51 crc kubenswrapper[4651]: I1011 04:53:51.395855 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f02eb5ea-3d54-4eb5-b302-5d1a92d44921-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"f02eb5ea-3d54-4eb5-b302-5d1a92d44921\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 11 04:53:51 crc kubenswrapper[4651]: I1011 04:53:51.395902 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f02eb5ea-3d54-4eb5-b302-5d1a92d44921-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"f02eb5ea-3d54-4eb5-b302-5d1a92d44921\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 11 04:53:51 crc kubenswrapper[4651]: I1011 04:53:51.433150 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f02eb5ea-3d54-4eb5-b302-5d1a92d44921-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"f02eb5ea-3d54-4eb5-b302-5d1a92d44921\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 11 04:53:51 crc kubenswrapper[4651]: I1011 04:53:51.449426 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 11 04:53:51 crc kubenswrapper[4651]: I1011 04:53:51.563073 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-cd92z" Oct 11 04:53:51 crc kubenswrapper[4651]: I1011 04:53:51.892758 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 11 04:53:51 crc kubenswrapper[4651]: W1011 04:53:51.912369 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podf02eb5ea_3d54_4eb5_b302_5d1a92d44921.slice/crio-1c1aee51fba56c48861cc0100b3215d5ef76cf2ce02fb1dd8b475579cc15d4cd WatchSource:0}: Error finding container 1c1aee51fba56c48861cc0100b3215d5ef76cf2ce02fb1dd8b475579cc15d4cd: Status 404 returned error can't find the container with id 1c1aee51fba56c48861cc0100b3215d5ef76cf2ce02fb1dd8b475579cc15d4cd Oct 11 04:53:51 crc kubenswrapper[4651]: I1011 04:53:51.936812 4651 patch_prober.go:28] interesting pod/router-default-5444994796-pxhmv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 04:53:51 crc kubenswrapper[4651]: [-]has-synced failed: reason withheld Oct 11 04:53:51 crc kubenswrapper[4651]: [+]process-running ok Oct 11 04:53:51 crc kubenswrapper[4651]: healthz check failed Oct 11 04:53:51 crc kubenswrapper[4651]: I1011 04:53:51.937099 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pxhmv" podUID="e011176f-c96e-4823-89f6-648d574d1ef4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 04:53:52 crc kubenswrapper[4651]: I1011 04:53:52.026387 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"9febfd36-febe-430d-9f97-9e3411b7155d","Type":"ContainerStarted","Data":"f6c3df431bc89b5f036bf3d5c755275e2ed87b54edc2bacac393677475e7791f"} Oct 11 04:53:52 crc kubenswrapper[4651]: I1011 04:53:52.033929 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f02eb5ea-3d54-4eb5-b302-5d1a92d44921","Type":"ContainerStarted","Data":"1c1aee51fba56c48861cc0100b3215d5ef76cf2ce02fb1dd8b475579cc15d4cd"} Oct 11 04:53:52 crc kubenswrapper[4651]: I1011 04:53:52.048720 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=3.048703127 podStartE2EDuration="3.048703127s" podCreationTimestamp="2025-10-11 04:53:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:52.047515236 +0000 UTC m=+152.943748032" watchObservedRunningTime="2025-10-11 04:53:52.048703127 +0000 UTC m=+152.944935923" Oct 11 04:53:52 crc kubenswrapper[4651]: I1011 04:53:52.936871 4651 patch_prober.go:28] interesting pod/router-default-5444994796-pxhmv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 04:53:52 crc kubenswrapper[4651]: [-]has-synced failed: reason withheld Oct 11 04:53:52 crc kubenswrapper[4651]: [+]process-running ok Oct 11 04:53:52 crc kubenswrapper[4651]: healthz check failed Oct 11 
04:53:52 crc kubenswrapper[4651]: I1011 04:53:52.936970 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pxhmv" podUID="e011176f-c96e-4823-89f6-648d574d1ef4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 04:53:53 crc kubenswrapper[4651]: I1011 04:53:53.054737 4651 generic.go:334] "Generic (PLEG): container finished" podID="9febfd36-febe-430d-9f97-9e3411b7155d" containerID="f6c3df431bc89b5f036bf3d5c755275e2ed87b54edc2bacac393677475e7791f" exitCode=0 Oct 11 04:53:53 crc kubenswrapper[4651]: I1011 04:53:53.054855 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"9febfd36-febe-430d-9f97-9e3411b7155d","Type":"ContainerDied","Data":"f6c3df431bc89b5f036bf3d5c755275e2ed87b54edc2bacac393677475e7791f"} Oct 11 04:53:53 crc kubenswrapper[4651]: I1011 04:53:53.064066 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f02eb5ea-3d54-4eb5-b302-5d1a92d44921","Type":"ContainerStarted","Data":"20e6af395c17edebc07c2024a8b03d41ed09f5584ac5e3a79e944fdfc18ae59a"} Oct 11 04:53:53 crc kubenswrapper[4651]: I1011 04:53:53.089978 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=2.089958405 podStartE2EDuration="2.089958405s" podCreationTimestamp="2025-10-11 04:53:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:53:53.086974566 +0000 UTC m=+153.983207382" watchObservedRunningTime="2025-10-11 04:53:53.089958405 +0000 UTC m=+153.986191201" Oct 11 04:53:53 crc kubenswrapper[4651]: I1011 04:53:53.654210 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-xx5gz" Oct 11 04:53:53 crc kubenswrapper[4651]: I1011 04:53:53.936779 4651 patch_prober.go:28] interesting pod/router-default-5444994796-pxhmv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 04:53:53 crc kubenswrapper[4651]: [-]has-synced failed: reason withheld Oct 11 04:53:53 crc kubenswrapper[4651]: [+]process-running ok Oct 11 04:53:53 crc kubenswrapper[4651]: healthz check failed Oct 11 04:53:53 crc kubenswrapper[4651]: I1011 04:53:53.936900 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pxhmv" podUID="e011176f-c96e-4823-89f6-648d574d1ef4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 04:53:54 crc kubenswrapper[4651]: I1011 04:53:54.114011 4651 generic.go:334] "Generic (PLEG): container finished" podID="f02eb5ea-3d54-4eb5-b302-5d1a92d44921" containerID="20e6af395c17edebc07c2024a8b03d41ed09f5584ac5e3a79e944fdfc18ae59a" exitCode=0 Oct 11 04:53:54 crc kubenswrapper[4651]: I1011 04:53:54.114150 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f02eb5ea-3d54-4eb5-b302-5d1a92d44921","Type":"ContainerDied","Data":"20e6af395c17edebc07c2024a8b03d41ed09f5584ac5e3a79e944fdfc18ae59a"} Oct 11 04:53:54 crc kubenswrapper[4651]: I1011 04:53:54.417435 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 11 04:53:54 crc kubenswrapper[4651]: I1011 04:53:54.559769 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9febfd36-febe-430d-9f97-9e3411b7155d-kube-api-access\") pod \"9febfd36-febe-430d-9f97-9e3411b7155d\" (UID: \"9febfd36-febe-430d-9f97-9e3411b7155d\") " Oct 11 04:53:54 crc kubenswrapper[4651]: I1011 04:53:54.559938 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9febfd36-febe-430d-9f97-9e3411b7155d-kubelet-dir\") pod \"9febfd36-febe-430d-9f97-9e3411b7155d\" (UID: \"9febfd36-febe-430d-9f97-9e3411b7155d\") " Oct 11 04:53:54 crc kubenswrapper[4651]: I1011 04:53:54.560595 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9febfd36-febe-430d-9f97-9e3411b7155d-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "9febfd36-febe-430d-9f97-9e3411b7155d" (UID: "9febfd36-febe-430d-9f97-9e3411b7155d"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 04:53:54 crc kubenswrapper[4651]: I1011 04:53:54.576109 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9febfd36-febe-430d-9f97-9e3411b7155d-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "9febfd36-febe-430d-9f97-9e3411b7155d" (UID: "9febfd36-febe-430d-9f97-9e3411b7155d"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:53:54 crc kubenswrapper[4651]: I1011 04:53:54.661789 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9febfd36-febe-430d-9f97-9e3411b7155d-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 11 04:53:54 crc kubenswrapper[4651]: I1011 04:53:54.661834 4651 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9febfd36-febe-430d-9f97-9e3411b7155d-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 11 04:53:54 crc kubenswrapper[4651]: I1011 04:53:54.944005 4651 patch_prober.go:28] interesting pod/router-default-5444994796-pxhmv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 04:53:54 crc kubenswrapper[4651]: [-]has-synced failed: reason withheld Oct 11 04:53:54 crc kubenswrapper[4651]: [+]process-running ok Oct 11 04:53:54 crc kubenswrapper[4651]: healthz check failed Oct 11 04:53:54 crc kubenswrapper[4651]: I1011 04:53:54.944059 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pxhmv" podUID="e011176f-c96e-4823-89f6-648d574d1ef4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 04:53:55 crc kubenswrapper[4651]: I1011 04:53:55.134435 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"9febfd36-febe-430d-9f97-9e3411b7155d","Type":"ContainerDied","Data":"a6bcaf2c39eaf5e3ea55663d60f4f844c33b4b21b3da7c473e97d7f8457882c0"} Oct 11 04:53:55 crc kubenswrapper[4651]: I1011 04:53:55.134497 4651 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="a6bcaf2c39eaf5e3ea55663d60f4f844c33b4b21b3da7c473e97d7f8457882c0" Oct 11 04:53:55 crc kubenswrapper[4651]: I1011 04:53:55.135394 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 11 04:53:55 crc kubenswrapper[4651]: I1011 04:53:55.246847 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" Oct 11 04:53:55 crc kubenswrapper[4651]: I1011 04:53:55.522716 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 11 04:53:55 crc kubenswrapper[4651]: I1011 04:53:55.593063 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f02eb5ea-3d54-4eb5-b302-5d1a92d44921-kubelet-dir\") pod \"f02eb5ea-3d54-4eb5-b302-5d1a92d44921\" (UID: \"f02eb5ea-3d54-4eb5-b302-5d1a92d44921\") " Oct 11 04:53:55 crc kubenswrapper[4651]: I1011 04:53:55.593127 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f02eb5ea-3d54-4eb5-b302-5d1a92d44921-kube-api-access\") pod \"f02eb5ea-3d54-4eb5-b302-5d1a92d44921\" (UID: \"f02eb5ea-3d54-4eb5-b302-5d1a92d44921\") " Oct 11 04:53:55 crc kubenswrapper[4651]: I1011 04:53:55.594268 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f02eb5ea-3d54-4eb5-b302-5d1a92d44921-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "f02eb5ea-3d54-4eb5-b302-5d1a92d44921" (UID: "f02eb5ea-3d54-4eb5-b302-5d1a92d44921"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 04:53:55 crc kubenswrapper[4651]: I1011 04:53:55.598244 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f02eb5ea-3d54-4eb5-b302-5d1a92d44921-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "f02eb5ea-3d54-4eb5-b302-5d1a92d44921" (UID: "f02eb5ea-3d54-4eb5-b302-5d1a92d44921"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:53:55 crc kubenswrapper[4651]: I1011 04:53:55.696779 4651 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f02eb5ea-3d54-4eb5-b302-5d1a92d44921-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 11 04:53:55 crc kubenswrapper[4651]: I1011 04:53:55.696815 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f02eb5ea-3d54-4eb5-b302-5d1a92d44921-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 11 04:53:55 crc kubenswrapper[4651]: I1011 04:53:55.936715 4651 patch_prober.go:28] interesting pod/router-default-5444994796-pxhmv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 04:53:55 crc kubenswrapper[4651]: [-]has-synced failed: reason withheld Oct 11 04:53:55 crc kubenswrapper[4651]: [+]process-running ok Oct 11 04:53:55 crc kubenswrapper[4651]: healthz check failed Oct 11 04:53:55 crc kubenswrapper[4651]: I1011 04:53:55.936767 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pxhmv" podUID="e011176f-c96e-4823-89f6-648d574d1ef4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 04:53:56 crc kubenswrapper[4651]: I1011 04:53:56.145216 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f02eb5ea-3d54-4eb5-b302-5d1a92d44921","Type":"ContainerDied","Data":"1c1aee51fba56c48861cc0100b3215d5ef76cf2ce02fb1dd8b475579cc15d4cd"} Oct 11 04:53:56 crc kubenswrapper[4651]: I1011 04:53:56.145321 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1c1aee51fba56c48861cc0100b3215d5ef76cf2ce02fb1dd8b475579cc15d4cd" Oct 11 04:53:56 crc kubenswrapper[4651]: I1011 04:53:56.145384 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 11 04:53:56 crc kubenswrapper[4651]: I1011 04:53:56.936231 4651 patch_prober.go:28] interesting pod/router-default-5444994796-pxhmv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 04:53:56 crc kubenswrapper[4651]: [-]has-synced failed: reason withheld Oct 11 04:53:56 crc kubenswrapper[4651]: [+]process-running ok Oct 11 04:53:56 crc kubenswrapper[4651]: healthz check failed Oct 11 04:53:56 crc kubenswrapper[4651]: I1011 04:53:56.936589 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pxhmv" podUID="e011176f-c96e-4823-89f6-648d574d1ef4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 04:53:57 crc kubenswrapper[4651]: I1011 04:53:57.937105 4651 patch_prober.go:28] interesting pod/router-default-5444994796-pxhmv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 04:53:57 crc kubenswrapper[4651]: [-]has-synced failed: reason withheld Oct 11 04:53:57 crc kubenswrapper[4651]: [+]process-running ok Oct 11 04:53:57 crc kubenswrapper[4651]: healthz check failed Oct 11 04:53:57 crc kubenswrapper[4651]: I1011 04:53:57.937178 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pxhmv" podUID="e011176f-c96e-4823-89f6-648d574d1ef4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 04:53:58 crc kubenswrapper[4651]: I1011 04:53:58.936187 4651 patch_prober.go:28] interesting pod/router-default-5444994796-pxhmv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 04:53:58 crc kubenswrapper[4651]: [-]has-synced failed: reason withheld Oct 11 04:53:58 crc kubenswrapper[4651]: [+]process-running ok Oct 11 04:53:58 crc kubenswrapper[4651]: healthz check failed Oct 11 04:53:58 crc kubenswrapper[4651]: I1011 04:53:58.936250 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pxhmv" podUID="e011176f-c96e-4823-89f6-648d574d1ef4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 04:53:59 crc kubenswrapper[4651]: I1011 04:53:59.936924 4651 patch_prober.go:28] interesting pod/router-default-5444994796-pxhmv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 04:53:59 crc kubenswrapper[4651]: [-]has-synced failed: reason withheld Oct 11 04:53:59 crc kubenswrapper[4651]: [+]process-running ok Oct 11 04:53:59 crc kubenswrapper[4651]: healthz check failed Oct 11 04:53:59 crc kubenswrapper[4651]: I1011 04:53:59.937305 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pxhmv" podUID="e011176f-c96e-4823-89f6-648d574d1ef4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 04:54:00 crc kubenswrapper[4651]: I1011 04:54:00.676638 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-console/downloads-7954f5f757-s9qnd" Oct 11 04:54:00 crc kubenswrapper[4651]: I1011 04:54:00.936040 4651 patch_prober.go:28] interesting pod/router-default-5444994796-pxhmv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 04:54:00 crc kubenswrapper[4651]: [-]has-synced failed: reason withheld Oct 11 04:54:00 crc kubenswrapper[4651]: [+]process-running ok Oct 11 04:54:00 crc kubenswrapper[4651]: healthz check failed Oct 11 04:54:00 crc kubenswrapper[4651]: I1011 04:54:00.936092 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pxhmv" podUID="e011176f-c96e-4823-89f6-648d574d1ef4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 04:54:01 crc kubenswrapper[4651]: I1011 04:54:01.084373 4651 patch_prober.go:28] interesting pod/console-f9d7485db-pc67v container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.13:8443/health\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body= Oct 11 04:54:01 crc kubenswrapper[4651]: I1011 04:54:01.084761 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-pc67v" podUID="ccf042b9-768a-413d-bc29-58ab74c06fc9" containerName="console" probeResult="failure" output="Get \"https://10.217.0.13:8443/health\": dial tcp 10.217.0.13:8443: connect: connection refused" Oct 11 04:54:01 crc kubenswrapper[4651]: I1011 04:54:01.578625 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a551fed8-58fb-48ae-88af-8dc0cb48fc30-metrics-certs\") pod \"network-metrics-daemon-tgvv8\" (UID: \"a551fed8-58fb-48ae-88af-8dc0cb48fc30\") " pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:54:01 crc kubenswrapper[4651]: I1011 04:54:01.584754 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a551fed8-58fb-48ae-88af-8dc0cb48fc30-metrics-certs\") pod \"network-metrics-daemon-tgvv8\" (UID: \"a551fed8-58fb-48ae-88af-8dc0cb48fc30\") " pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:54:01 crc kubenswrapper[4651]: I1011 04:54:01.798017 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-tgvv8" Oct 11 04:54:01 crc kubenswrapper[4651]: I1011 04:54:01.936667 4651 patch_prober.go:28] interesting pod/router-default-5444994796-pxhmv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 04:54:01 crc kubenswrapper[4651]: [-]has-synced failed: reason withheld Oct 11 04:54:01 crc kubenswrapper[4651]: [+]process-running ok Oct 11 04:54:01 crc kubenswrapper[4651]: healthz check failed Oct 11 04:54:01 crc kubenswrapper[4651]: I1011 04:54:01.936765 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pxhmv" podUID="e011176f-c96e-4823-89f6-648d574d1ef4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 04:54:02 crc kubenswrapper[4651]: I1011 04:54:02.936307 4651 patch_prober.go:28] interesting pod/router-default-5444994796-pxhmv container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 04:54:02 crc kubenswrapper[4651]: [-]has-synced failed: reason withheld Oct 11 04:54:02 crc kubenswrapper[4651]: [+]process-running ok Oct 11 04:54:02 crc kubenswrapper[4651]: healthz check failed Oct 11 04:54:02 crc kubenswrapper[4651]: I1011 04:54:02.937052 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-pxhmv" podUID="e011176f-c96e-4823-89f6-648d574d1ef4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 04:54:03 crc kubenswrapper[4651]: I1011 04:54:03.939777 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-pxhmv" Oct 11 04:54:03 crc kubenswrapper[4651]: I1011 04:54:03.944217 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-pxhmv" Oct 11 04:54:08 crc kubenswrapper[4651]: I1011 04:54:08.390766 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:54:11 crc kubenswrapper[4651]: I1011 04:54:11.092389 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-pc67v" Oct 11 04:54:11 crc kubenswrapper[4651]: I1011 04:54:11.100369 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-pc67v" Oct 11 04:54:16 crc kubenswrapper[4651]: I1011 04:54:16.310534 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 04:54:16 crc kubenswrapper[4651]: I1011 04:54:16.310848 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 04:54:18 crc kubenswrapper[4651]: I1011 04:54:18.261242 4651 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 04:54:19 crc kubenswrapper[4651]: E1011 04:54:19.571789 4651 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Oct 11 04:54:19 crc kubenswrapper[4651]: E1011 04:54:19.572013 4651 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-krssl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-drmcl_openshift-marketplace(ac326965-3277-44c2-bd27-773586999e23): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 11 04:54:19 crc kubenswrapper[4651]: E1011 04:54:19.573226 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-drmcl" podUID="ac326965-3277-44c2-bd27-773586999e23" Oct 11 04:54:21 crc kubenswrapper[4651]: I1011 04:54:21.572096 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-k85tm" Oct 11 04:54:23 crc kubenswrapper[4651]: E1011 04:54:23.122730 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-drmcl" podUID="ac326965-3277-44c2-bd27-773586999e23" Oct 11 04:54:24 crc kubenswrapper[4651]: E1011 04:54:24.512045 4651 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" 
image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Oct 11 04:54:24 crc kubenswrapper[4651]: E1011 04:54:24.512188 4651 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zrz7h,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-kpkh2_openshift-marketplace(dd3f200d-51eb-4758-b64a-10e9c8e36b65): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 11 04:54:24 crc kubenswrapper[4651]: E1011 04:54:24.513329 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-kpkh2" podUID="dd3f200d-51eb-4758-b64a-10e9c8e36b65" Oct 11 04:54:26 crc kubenswrapper[4651]: E1011 04:54:26.879591 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-kpkh2" podUID="dd3f200d-51eb-4758-b64a-10e9c8e36b65" Oct 11 04:54:26 crc kubenswrapper[4651]: E1011 04:54:26.968537 4651 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Oct 11 04:54:26 crc kubenswrapper[4651]: E1011 04:54:26.969098 4651 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-spgvj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-6gt5n_openshift-marketplace(24573bf5-c576-4e14-b3ac-f33e6ca99af3): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 11 04:54:26 crc kubenswrapper[4651]: E1011 04:54:26.970309 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-6gt5n" podUID="24573bf5-c576-4e14-b3ac-f33e6ca99af3" Oct 11 04:54:28 crc kubenswrapper[4651]: E1011 04:54:28.299585 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-6gt5n" podUID="24573bf5-c576-4e14-b3ac-f33e6ca99af3" Oct 11 04:54:28 crc kubenswrapper[4651]: E1011 04:54:28.388991 4651 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Oct 11 04:54:28 crc kubenswrapper[4651]: E1011 04:54:28.389136 4651 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-64r9s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-g9dtw_openshift-marketplace(b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 11 04:54:28 crc kubenswrapper[4651]: E1011 04:54:28.390269 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-g9dtw" podUID="b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c" Oct 11 04:54:28 crc kubenswrapper[4651]: E1011 04:54:28.403855 4651 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Oct 11 04:54:28 crc kubenswrapper[4651]: E1011 04:54:28.404014 4651 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-52sjt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-vtcqh_openshift-marketplace(ad64884c-e97d-4dc2-8f86-a44c537f7068): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 11 04:54:28 crc kubenswrapper[4651]: E1011 04:54:28.405194 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-vtcqh" podUID="ad64884c-e97d-4dc2-8f86-a44c537f7068" Oct 11 04:54:28 crc kubenswrapper[4651]: E1011 04:54:28.416917 4651 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Oct 11 04:54:28 crc kubenswrapper[4651]: E1011 04:54:28.417070 4651 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rnndv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-7d6h5_openshift-marketplace(dd02099e-6a9d-425a-ac83-897f19a1007d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 11 04:54:28 crc kubenswrapper[4651]: E1011 04:54:28.418289 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-7d6h5" podUID="dd02099e-6a9d-425a-ac83-897f19a1007d" Oct 11 04:54:28 crc kubenswrapper[4651]: E1011 04:54:28.419196 4651 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Oct 11 04:54:28 crc kubenswrapper[4651]: E1011 04:54:28.419357 4651 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-l8npx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-5cw4f_openshift-marketplace(aa37954c-dd94-44fc-86e6-8fb23c429af1): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 11 04:54:28 crc kubenswrapper[4651]: E1011 04:54:28.420761 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-5cw4f" podUID="aa37954c-dd94-44fc-86e6-8fb23c429af1" Oct 11 04:54:28 crc kubenswrapper[4651]: E1011 04:54:28.436270 4651 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Oct 11 04:54:28 crc kubenswrapper[4651]: E1011 04:54:28.436410 4651 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-g74tz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-bv8hd_openshift-marketplace(fb3ff40a-c2bf-42d3-920b-b94751bed645): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 11 04:54:28 crc kubenswrapper[4651]: E1011 04:54:28.438172 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-bv8hd" podUID="fb3ff40a-c2bf-42d3-920b-b94751bed645" Oct 11 04:54:28 crc kubenswrapper[4651]: I1011 04:54:28.693419 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-tgvv8"] Oct 11 04:54:28 crc kubenswrapper[4651]: W1011 04:54:28.705618 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda551fed8_58fb_48ae_88af_8dc0cb48fc30.slice/crio-06d49189bb7102008fe2db2bf34afba7f9022ad0e3e8a8e90eb14c8c32b3390f WatchSource:0}: Error finding container 06d49189bb7102008fe2db2bf34afba7f9022ad0e3e8a8e90eb14c8c32b3390f: Status 404 returned error can't find the container with id 06d49189bb7102008fe2db2bf34afba7f9022ad0e3e8a8e90eb14c8c32b3390f Oct 11 04:54:29 crc kubenswrapper[4651]: I1011 04:54:29.356240 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-tgvv8" event={"ID":"a551fed8-58fb-48ae-88af-8dc0cb48fc30","Type":"ContainerStarted","Data":"a61b61ec01a2015884e8ae9259bd76c1b632fb1a5207a5d5fdd51de53c199b6c"} Oct 11 04:54:29 crc kubenswrapper[4651]: I1011 04:54:29.356286 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-tgvv8" event={"ID":"a551fed8-58fb-48ae-88af-8dc0cb48fc30","Type":"ContainerStarted","Data":"81e7e36e14265119a8062a1e081e45b5af98b1f7b35b0b760418e9fd3f7b2aae"} Oct 11 04:54:29 crc kubenswrapper[4651]: I1011 04:54:29.356302 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-tgvv8" 
event={"ID":"a551fed8-58fb-48ae-88af-8dc0cb48fc30","Type":"ContainerStarted","Data":"06d49189bb7102008fe2db2bf34afba7f9022ad0e3e8a8e90eb14c8c32b3390f"} Oct 11 04:54:29 crc kubenswrapper[4651]: E1011 04:54:29.360007 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-g9dtw" podUID="b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c" Oct 11 04:54:29 crc kubenswrapper[4651]: E1011 04:54:29.360061 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-vtcqh" podUID="ad64884c-e97d-4dc2-8f86-a44c537f7068" Oct 11 04:54:29 crc kubenswrapper[4651]: E1011 04:54:29.360031 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-7d6h5" podUID="dd02099e-6a9d-425a-ac83-897f19a1007d" Oct 11 04:54:29 crc kubenswrapper[4651]: E1011 04:54:29.360127 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-bv8hd" podUID="fb3ff40a-c2bf-42d3-920b-b94751bed645" Oct 11 04:54:29 crc kubenswrapper[4651]: E1011 04:54:29.360159 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-5cw4f" podUID="aa37954c-dd94-44fc-86e6-8fb23c429af1" Oct 11 04:54:29 crc kubenswrapper[4651]: I1011 04:54:29.402348 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-tgvv8" podStartSLOduration=170.402325447 podStartE2EDuration="2m50.402325447s" podCreationTimestamp="2025-10-11 04:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:54:29.393910217 +0000 UTC m=+190.290143023" watchObservedRunningTime="2025-10-11 04:54:29.402325447 +0000 UTC m=+190.298558263" Oct 11 04:54:35 crc kubenswrapper[4651]: I1011 04:54:35.391146 4651 generic.go:334] "Generic (PLEG): container finished" podID="ac326965-3277-44c2-bd27-773586999e23" containerID="9e78bcadb7372b4d22b523f1b844f235bec688e5be55abfa8edf3ec2c42f62e4" exitCode=0 Oct 11 04:54:35 crc kubenswrapper[4651]: I1011 04:54:35.391219 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-drmcl" event={"ID":"ac326965-3277-44c2-bd27-773586999e23","Type":"ContainerDied","Data":"9e78bcadb7372b4d22b523f1b844f235bec688e5be55abfa8edf3ec2c42f62e4"} Oct 11 04:54:36 crc kubenswrapper[4651]: I1011 04:54:36.402101 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-drmcl" 
event={"ID":"ac326965-3277-44c2-bd27-773586999e23","Type":"ContainerStarted","Data":"1d45773b7318f593d0a25680d4464831ab23d910dd4adceaa6f67558de857d35"} Oct 11 04:54:36 crc kubenswrapper[4651]: I1011 04:54:36.434365 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-drmcl" podStartSLOduration=2.456719777 podStartE2EDuration="47.43433663s" podCreationTimestamp="2025-10-11 04:53:49 +0000 UTC" firstStartedPulling="2025-10-11 04:53:51.004870853 +0000 UTC m=+151.901103649" lastFinishedPulling="2025-10-11 04:54:35.982487666 +0000 UTC m=+196.878720502" observedRunningTime="2025-10-11 04:54:36.423803774 +0000 UTC m=+197.320036630" watchObservedRunningTime="2025-10-11 04:54:36.43433663 +0000 UTC m=+197.330569466" Oct 11 04:54:39 crc kubenswrapper[4651]: I1011 04:54:39.685555 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-drmcl" Oct 11 04:54:39 crc kubenswrapper[4651]: I1011 04:54:39.685601 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-drmcl" Oct 11 04:54:40 crc kubenswrapper[4651]: I1011 04:54:40.843491 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-drmcl" podUID="ac326965-3277-44c2-bd27-773586999e23" containerName="registry-server" probeResult="failure" output=< Oct 11 04:54:40 crc kubenswrapper[4651]: timeout: failed to connect service ":50051" within 1s Oct 11 04:54:40 crc kubenswrapper[4651]: > Oct 11 04:54:41 crc kubenswrapper[4651]: I1011 04:54:41.437966 4651 generic.go:334] "Generic (PLEG): container finished" podID="dd3f200d-51eb-4758-b64a-10e9c8e36b65" containerID="6c0558543b8ba84616c427a4a8b7a3096907e334f8b452e1bd6f39d663c3c17f" exitCode=0 Oct 11 04:54:41 crc kubenswrapper[4651]: I1011 04:54:41.438029 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kpkh2" event={"ID":"dd3f200d-51eb-4758-b64a-10e9c8e36b65","Type":"ContainerDied","Data":"6c0558543b8ba84616c427a4a8b7a3096907e334f8b452e1bd6f39d663c3c17f"} Oct 11 04:54:42 crc kubenswrapper[4651]: I1011 04:54:42.444947 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kpkh2" event={"ID":"dd3f200d-51eb-4758-b64a-10e9c8e36b65","Type":"ContainerStarted","Data":"9c44d2c9bb9de3f77dfbce40a9b36b1d9b223866f1adba4db06f285a7ccba479"} Oct 11 04:54:42 crc kubenswrapper[4651]: I1011 04:54:42.460318 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kpkh2" podStartSLOduration=2.629710837 podStartE2EDuration="54.460299579s" podCreationTimestamp="2025-10-11 04:53:48 +0000 UTC" firstStartedPulling="2025-10-11 04:53:49.983519266 +0000 UTC m=+150.879752062" lastFinishedPulling="2025-10-11 04:54:41.814108008 +0000 UTC m=+202.710340804" observedRunningTime="2025-10-11 04:54:42.459994643 +0000 UTC m=+203.356227469" watchObservedRunningTime="2025-10-11 04:54:42.460299579 +0000 UTC m=+203.356532375" Oct 11 04:54:44 crc kubenswrapper[4651]: I1011 04:54:44.471212 4651 generic.go:334] "Generic (PLEG): container finished" podID="24573bf5-c576-4e14-b3ac-f33e6ca99af3" containerID="e833e5a49b922dfe38820ce5d782ffb180be1111a0e6ba58c8968a1812652fd6" exitCode=0 Oct 11 04:54:44 crc kubenswrapper[4651]: I1011 04:54:44.471285 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6gt5n" 
event={"ID":"24573bf5-c576-4e14-b3ac-f33e6ca99af3","Type":"ContainerDied","Data":"e833e5a49b922dfe38820ce5d782ffb180be1111a0e6ba58c8968a1812652fd6"} Oct 11 04:54:44 crc kubenswrapper[4651]: I1011 04:54:44.475951 4651 generic.go:334] "Generic (PLEG): container finished" podID="b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c" containerID="10e5c5761162b859828229031abd412e75c1f97938c3746f9f534c216115dbe3" exitCode=0 Oct 11 04:54:44 crc kubenswrapper[4651]: I1011 04:54:44.475992 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g9dtw" event={"ID":"b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c","Type":"ContainerDied","Data":"10e5c5761162b859828229031abd412e75c1f97938c3746f9f534c216115dbe3"} Oct 11 04:54:45 crc kubenswrapper[4651]: I1011 04:54:45.487914 4651 generic.go:334] "Generic (PLEG): container finished" podID="ad64884c-e97d-4dc2-8f86-a44c537f7068" containerID="bfbbddbac19ada18a351a881fb377aa4fb5fd7fdb8a6befc81f103fb1013585c" exitCode=0 Oct 11 04:54:45 crc kubenswrapper[4651]: I1011 04:54:45.487980 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vtcqh" event={"ID":"ad64884c-e97d-4dc2-8f86-a44c537f7068","Type":"ContainerDied","Data":"bfbbddbac19ada18a351a881fb377aa4fb5fd7fdb8a6befc81f103fb1013585c"} Oct 11 04:54:45 crc kubenswrapper[4651]: I1011 04:54:45.491399 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g9dtw" event={"ID":"b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c","Type":"ContainerStarted","Data":"db3802d8ac52a3253e593ac9f76da7ab912261b2ef7dc4ef872f6072420b8c03"} Oct 11 04:54:45 crc kubenswrapper[4651]: I1011 04:54:45.494789 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6gt5n" event={"ID":"24573bf5-c576-4e14-b3ac-f33e6ca99af3","Type":"ContainerStarted","Data":"7e88c131e0f34e2ca13b70b0058e9681ced1639f903478939165a46abc2ac4e6"} Oct 11 04:54:45 crc kubenswrapper[4651]: I1011 04:54:45.497223 4651 generic.go:334] "Generic (PLEG): container finished" podID="fb3ff40a-c2bf-42d3-920b-b94751bed645" containerID="a7f94050b37e1d12cd826cc9d7ece91b59677fc18b01b49895e0652f15baf346" exitCode=0 Oct 11 04:54:45 crc kubenswrapper[4651]: I1011 04:54:45.497275 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bv8hd" event={"ID":"fb3ff40a-c2bf-42d3-920b-b94751bed645","Type":"ContainerDied","Data":"a7f94050b37e1d12cd826cc9d7ece91b59677fc18b01b49895e0652f15baf346"} Oct 11 04:54:45 crc kubenswrapper[4651]: I1011 04:54:45.499634 4651 generic.go:334] "Generic (PLEG): container finished" podID="aa37954c-dd94-44fc-86e6-8fb23c429af1" containerID="19a237e49261c4c890b74d23d94ea6c8cb6067418203fa98eca4975fcef10195" exitCode=0 Oct 11 04:54:45 crc kubenswrapper[4651]: I1011 04:54:45.499658 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5cw4f" event={"ID":"aa37954c-dd94-44fc-86e6-8fb23c429af1","Type":"ContainerDied","Data":"19a237e49261c4c890b74d23d94ea6c8cb6067418203fa98eca4975fcef10195"} Oct 11 04:54:45 crc kubenswrapper[4651]: I1011 04:54:45.562567 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-g9dtw" podStartSLOduration=3.667209786 podStartE2EDuration="58.562545533s" podCreationTimestamp="2025-10-11 04:53:47 +0000 UTC" firstStartedPulling="2025-10-11 04:53:49.976859362 +0000 UTC m=+150.873092158" lastFinishedPulling="2025-10-11 04:54:44.872195109 +0000 UTC 
m=+205.768427905" observedRunningTime="2025-10-11 04:54:45.560090521 +0000 UTC m=+206.456323317" watchObservedRunningTime="2025-10-11 04:54:45.562545533 +0000 UTC m=+206.458778329" Oct 11 04:54:45 crc kubenswrapper[4651]: I1011 04:54:45.588353 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6gt5n" podStartSLOduration=3.327843249 podStartE2EDuration="59.588336822s" podCreationTimestamp="2025-10-11 04:53:46 +0000 UTC" firstStartedPulling="2025-10-11 04:53:48.793808494 +0000 UTC m=+149.690041290" lastFinishedPulling="2025-10-11 04:54:45.054302067 +0000 UTC m=+205.950534863" observedRunningTime="2025-10-11 04:54:45.586055214 +0000 UTC m=+206.482288010" watchObservedRunningTime="2025-10-11 04:54:45.588336822 +0000 UTC m=+206.484569618" Oct 11 04:54:46 crc kubenswrapper[4651]: I1011 04:54:46.310671 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 04:54:46 crc kubenswrapper[4651]: I1011 04:54:46.311125 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 04:54:46 crc kubenswrapper[4651]: I1011 04:54:46.311205 4651 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" Oct 11 04:54:46 crc kubenswrapper[4651]: I1011 04:54:46.312221 4651 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9"} pod="openshift-machine-config-operator/machine-config-daemon-78jnv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 11 04:54:46 crc kubenswrapper[4651]: I1011 04:54:46.312452 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" containerID="cri-o://583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9" gracePeriod=600 Oct 11 04:54:46 crc kubenswrapper[4651]: I1011 04:54:46.509136 4651 generic.go:334] "Generic (PLEG): container finished" podID="dd02099e-6a9d-425a-ac83-897f19a1007d" containerID="49e7a60ae9774d726d6ef0e80c4ff6871f58731d1cde1c610d79a29628e13d23" exitCode=0 Oct 11 04:54:46 crc kubenswrapper[4651]: I1011 04:54:46.509194 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7d6h5" event={"ID":"dd02099e-6a9d-425a-ac83-897f19a1007d","Type":"ContainerDied","Data":"49e7a60ae9774d726d6ef0e80c4ff6871f58731d1cde1c610d79a29628e13d23"} Oct 11 04:54:46 crc kubenswrapper[4651]: I1011 04:54:46.543123 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6gt5n" Oct 11 04:54:46 crc kubenswrapper[4651]: I1011 04:54:46.543200 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6gt5n" Oct 
11 04:54:47 crc kubenswrapper[4651]: I1011 04:54:47.515123 4651 generic.go:334] "Generic (PLEG): container finished" podID="519a1ae1-e964-48b0-8b61-835146df28c1" containerID="583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9" exitCode=0 Oct 11 04:54:47 crc kubenswrapper[4651]: I1011 04:54:47.515315 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" event={"ID":"519a1ae1-e964-48b0-8b61-835146df28c1","Type":"ContainerDied","Data":"583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9"} Oct 11 04:54:47 crc kubenswrapper[4651]: I1011 04:54:47.592538 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-6gt5n" podUID="24573bf5-c576-4e14-b3ac-f33e6ca99af3" containerName="registry-server" probeResult="failure" output=< Oct 11 04:54:47 crc kubenswrapper[4651]: timeout: failed to connect service ":50051" within 1s Oct 11 04:54:47 crc kubenswrapper[4651]: > Oct 11 04:54:48 crc kubenswrapper[4651]: I1011 04:54:48.311491 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-g9dtw" Oct 11 04:54:48 crc kubenswrapper[4651]: I1011 04:54:48.312967 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-g9dtw" Oct 11 04:54:48 crc kubenswrapper[4651]: I1011 04:54:48.388313 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-g9dtw" Oct 11 04:54:48 crc kubenswrapper[4651]: I1011 04:54:48.522977 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" event={"ID":"519a1ae1-e964-48b0-8b61-835146df28c1","Type":"ContainerStarted","Data":"5ba3105ff1f646ac3c317b8777235fea6078905260c012507f606af9534c8bd2"} Oct 11 04:54:48 crc kubenswrapper[4651]: I1011 04:54:48.721935 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-kpkh2" Oct 11 04:54:48 crc kubenswrapper[4651]: I1011 04:54:48.722204 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kpkh2" Oct 11 04:54:48 crc kubenswrapper[4651]: I1011 04:54:48.772853 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-kpkh2" Oct 11 04:54:49 crc kubenswrapper[4651]: I1011 04:54:49.537086 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vtcqh" event={"ID":"ad64884c-e97d-4dc2-8f86-a44c537f7068","Type":"ContainerStarted","Data":"32308b968e1b4ada23facceee4f34e59251a6ab99097a2c817b18b2f96d874c5"} Oct 11 04:54:49 crc kubenswrapper[4651]: I1011 04:54:49.542884 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bv8hd" event={"ID":"fb3ff40a-c2bf-42d3-920b-b94751bed645","Type":"ContainerStarted","Data":"929cb6c0b1be435fdbce1a6a70658f2a6ff17353b360b50bf6c9d360d29ef5de"} Oct 11 04:54:49 crc kubenswrapper[4651]: I1011 04:54:49.546584 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5cw4f" event={"ID":"aa37954c-dd94-44fc-86e6-8fb23c429af1","Type":"ContainerStarted","Data":"b3f88d0b3b78c5e004814d06f1fb6dc985804875e7b56ca493b9431b419eb607"} Oct 11 04:54:49 crc kubenswrapper[4651]: I1011 04:54:49.549335 4651 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-marketplace/community-operators-7d6h5" event={"ID":"dd02099e-6a9d-425a-ac83-897f19a1007d","Type":"ContainerStarted","Data":"0c8c93d396ea747b0c698fd86e3e97b3e50ac50def289b9f1941f32d02fdff30"} Oct 11 04:54:49 crc kubenswrapper[4651]: I1011 04:54:49.559708 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vtcqh" podStartSLOduration=4.909683512 podStartE2EDuration="1m4.559647496s" podCreationTimestamp="2025-10-11 04:53:45 +0000 UTC" firstStartedPulling="2025-10-11 04:53:48.772378813 +0000 UTC m=+149.668611609" lastFinishedPulling="2025-10-11 04:54:48.422342797 +0000 UTC m=+209.318575593" observedRunningTime="2025-10-11 04:54:49.556686964 +0000 UTC m=+210.452919760" watchObservedRunningTime="2025-10-11 04:54:49.559647496 +0000 UTC m=+210.455880292" Oct 11 04:54:49 crc kubenswrapper[4651]: I1011 04:54:49.584541 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-7d6h5" podStartSLOduration=3.7749913189999997 podStartE2EDuration="1m3.584520136s" podCreationTimestamp="2025-10-11 04:53:46 +0000 UTC" firstStartedPulling="2025-10-11 04:53:48.866323031 +0000 UTC m=+149.762555827" lastFinishedPulling="2025-10-11 04:54:48.675851848 +0000 UTC m=+209.572084644" observedRunningTime="2025-10-11 04:54:49.577164422 +0000 UTC m=+210.473397228" watchObservedRunningTime="2025-10-11 04:54:49.584520136 +0000 UTC m=+210.480752932" Oct 11 04:54:49 crc kubenswrapper[4651]: I1011 04:54:49.594425 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-bv8hd" podStartSLOduration=4.020563256 podStartE2EDuration="1m3.594409243s" podCreationTimestamp="2025-10-11 04:53:46 +0000 UTC" firstStartedPulling="2025-10-11 04:53:48.820932924 +0000 UTC m=+149.717165720" lastFinishedPulling="2025-10-11 04:54:48.394778911 +0000 UTC m=+209.291011707" observedRunningTime="2025-10-11 04:54:49.591326898 +0000 UTC m=+210.487559714" watchObservedRunningTime="2025-10-11 04:54:49.594409243 +0000 UTC m=+210.490642039" Oct 11 04:54:49 crc kubenswrapper[4651]: I1011 04:54:49.606577 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5cw4f" podStartSLOduration=3.122871679 podStartE2EDuration="1m0.606562327s" podCreationTimestamp="2025-10-11 04:53:49 +0000 UTC" firstStartedPulling="2025-10-11 04:53:51.001290509 +0000 UTC m=+151.897523305" lastFinishedPulling="2025-10-11 04:54:48.484981157 +0000 UTC m=+209.381213953" observedRunningTime="2025-10-11 04:54:49.60431978 +0000 UTC m=+210.500552576" watchObservedRunningTime="2025-10-11 04:54:49.606562327 +0000 UTC m=+210.502795123" Oct 11 04:54:49 crc kubenswrapper[4651]: I1011 04:54:49.607272 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kpkh2" Oct 11 04:54:49 crc kubenswrapper[4651]: I1011 04:54:49.732750 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-drmcl" Oct 11 04:54:49 crc kubenswrapper[4651]: I1011 04:54:49.769049 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-drmcl" Oct 11 04:54:50 crc kubenswrapper[4651]: I1011 04:54:50.099458 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5cw4f" Oct 11 04:54:50 crc kubenswrapper[4651]: 
Oct 11 04:54:51 crc kubenswrapper[4651]: I1011 04:54:51.133455    4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-5cw4f" podUID="aa37954c-dd94-44fc-86e6-8fb23c429af1" containerName="registry-server" probeResult="failure" output=<
Oct 11 04:54:51 crc kubenswrapper[4651]: timeout: failed to connect service ":50051" within 1s
Oct 11 04:54:51 crc kubenswrapper[4651]: >
Oct 11 04:54:52 crc kubenswrapper[4651]: I1011 04:54:52.274241    4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kpkh2"]
Oct 11 04:54:52 crc kubenswrapper[4651]: I1011 04:54:52.274726    4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-kpkh2" podUID="dd3f200d-51eb-4758-b64a-10e9c8e36b65" containerName="registry-server" containerID="cri-o://9c44d2c9bb9de3f77dfbce40a9b36b1d9b223866f1adba4db06f285a7ccba479" gracePeriod=2
Oct 11 04:54:52 crc kubenswrapper[4651]: I1011 04:54:52.564081    4651 generic.go:334] "Generic (PLEG): container finished" podID="dd3f200d-51eb-4758-b64a-10e9c8e36b65" containerID="9c44d2c9bb9de3f77dfbce40a9b36b1d9b223866f1adba4db06f285a7ccba479" exitCode=0
Oct 11 04:54:52 crc kubenswrapper[4651]: I1011 04:54:52.564137    4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kpkh2" event={"ID":"dd3f200d-51eb-4758-b64a-10e9c8e36b65","Type":"ContainerDied","Data":"9c44d2c9bb9de3f77dfbce40a9b36b1d9b223866f1adba4db06f285a7ccba479"}
Oct 11 04:54:52 crc kubenswrapper[4651]: I1011 04:54:52.634699    4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kpkh2"
Oct 11 04:54:52 crc kubenswrapper[4651]: I1011 04:54:52.731880    4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zrz7h\" (UniqueName: \"kubernetes.io/projected/dd3f200d-51eb-4758-b64a-10e9c8e36b65-kube-api-access-zrz7h\") pod \"dd3f200d-51eb-4758-b64a-10e9c8e36b65\" (UID: \"dd3f200d-51eb-4758-b64a-10e9c8e36b65\") "
Oct 11 04:54:52 crc kubenswrapper[4651]: I1011 04:54:52.731943    4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd3f200d-51eb-4758-b64a-10e9c8e36b65-catalog-content\") pod \"dd3f200d-51eb-4758-b64a-10e9c8e36b65\" (UID: \"dd3f200d-51eb-4758-b64a-10e9c8e36b65\") "
Oct 11 04:54:52 crc kubenswrapper[4651]: I1011 04:54:52.731991    4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd3f200d-51eb-4758-b64a-10e9c8e36b65-utilities\") pod \"dd3f200d-51eb-4758-b64a-10e9c8e36b65\" (UID: \"dd3f200d-51eb-4758-b64a-10e9c8e36b65\") "
Oct 11 04:54:52 crc kubenswrapper[4651]: I1011 04:54:52.733119    4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd3f200d-51eb-4758-b64a-10e9c8e36b65-utilities" (OuterVolumeSpecName: "utilities") pod "dd3f200d-51eb-4758-b64a-10e9c8e36b65" (UID: "dd3f200d-51eb-4758-b64a-10e9c8e36b65"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 04:54:52 crc kubenswrapper[4651]: I1011 04:54:52.733418    4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd3f200d-51eb-4758-b64a-10e9c8e36b65-utilities\") on node \"crc\" DevicePath \"\""
Oct 11 04:54:52 crc kubenswrapper[4651]: I1011 04:54:52.739974    4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd3f200d-51eb-4758-b64a-10e9c8e36b65-kube-api-access-zrz7h" (OuterVolumeSpecName: "kube-api-access-zrz7h") pod "dd3f200d-51eb-4758-b64a-10e9c8e36b65" (UID: "dd3f200d-51eb-4758-b64a-10e9c8e36b65"). InnerVolumeSpecName "kube-api-access-zrz7h". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 04:54:52 crc kubenswrapper[4651]: I1011 04:54:52.743924    4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd3f200d-51eb-4758-b64a-10e9c8e36b65-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dd3f200d-51eb-4758-b64a-10e9c8e36b65" (UID: "dd3f200d-51eb-4758-b64a-10e9c8e36b65"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 04:54:52 crc kubenswrapper[4651]: I1011 04:54:52.834913    4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zrz7h\" (UniqueName: \"kubernetes.io/projected/dd3f200d-51eb-4758-b64a-10e9c8e36b65-kube-api-access-zrz7h\") on node \"crc\" DevicePath \"\""
Oct 11 04:54:52 crc kubenswrapper[4651]: I1011 04:54:52.834948    4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd3f200d-51eb-4758-b64a-10e9c8e36b65-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 11 04:54:53 crc kubenswrapper[4651]: I1011 04:54:53.571789    4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kpkh2" event={"ID":"dd3f200d-51eb-4758-b64a-10e9c8e36b65","Type":"ContainerDied","Data":"d77f68057e9284c471605711818dd6093d36979c971e741949be4e2e36622688"}
Oct 11 04:54:53 crc kubenswrapper[4651]: I1011 04:54:53.571848    4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kpkh2"
Oct 11 04:54:53 crc kubenswrapper[4651]: I1011 04:54:53.571851    4651 scope.go:117] "RemoveContainer" containerID="9c44d2c9bb9de3f77dfbce40a9b36b1d9b223866f1adba4db06f285a7ccba479"
Oct 11 04:54:53 crc kubenswrapper[4651]: I1011 04:54:53.589165    4651 scope.go:117] "RemoveContainer" containerID="6c0558543b8ba84616c427a4a8b7a3096907e334f8b452e1bd6f39d663c3c17f"
Oct 11 04:54:53 crc kubenswrapper[4651]: I1011 04:54:53.597022    4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kpkh2"]
Oct 11 04:54:53 crc kubenswrapper[4651]: I1011 04:54:53.600527    4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-kpkh2"]
Oct 11 04:54:53 crc kubenswrapper[4651]: I1011 04:54:53.615664    4651 scope.go:117] "RemoveContainer" containerID="950d2a73ced9f02ee969d66bfe89d6f830cb3dd486e0f5f9480bc6845c63120b"
Oct 11 04:54:53 crc kubenswrapper[4651]: I1011 04:54:53.875496    4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd3f200d-51eb-4758-b64a-10e9c8e36b65" path="/var/lib/kubelet/pods/dd3f200d-51eb-4758-b64a-10e9c8e36b65/volumes"
Oct 11 04:54:56 crc kubenswrapper[4651]: I1011 04:54:56.377391    4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vtcqh"
Oct 11 04:54:56 crc kubenswrapper[4651]: I1011 04:54:56.377696    4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vtcqh"
Oct 11 04:54:56 crc kubenswrapper[4651]: I1011 04:54:56.420946    4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vtcqh"
Oct 11 04:54:56 crc kubenswrapper[4651]: I1011 04:54:56.580934    4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6gt5n"
Oct 11 04:54:56 crc kubenswrapper[4651]: I1011 04:54:56.631392    4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vtcqh"
Oct 11 04:54:56 crc kubenswrapper[4651]: I1011 04:54:56.634864    4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6gt5n"
Oct 11 04:54:56 crc kubenswrapper[4651]: I1011 04:54:56.847882    4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-bv8hd"
Oct 11 04:54:56 crc kubenswrapper[4651]: I1011 04:54:56.848318    4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-bv8hd"
Oct 11 04:54:56 crc kubenswrapper[4651]: I1011 04:54:56.900765    4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-bv8hd"
Oct 11 04:54:57 crc kubenswrapper[4651]: I1011 04:54:57.034914    4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-7d6h5"
Oct 11 04:54:57 crc kubenswrapper[4651]: I1011 04:54:57.035258    4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-7d6h5"
Oct 11 04:54:57 crc kubenswrapper[4651]: I1011 04:54:57.076758    4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-7d6h5"
Oct 11 04:54:57 crc kubenswrapper[4651]: I1011 04:54:57.626672    4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-7d6h5"
Oct 11 04:54:57 crc kubenswrapper[4651]: I1011 04:54:57.628166    4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-bv8hd"
Oct 11 04:54:58 crc kubenswrapper[4651]: I1011 04:54:58.360236    4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-g9dtw"
Oct 11 04:54:58 crc kubenswrapper[4651]: I1011 04:54:58.877065    4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7d6h5"]
Oct 11 04:54:59 crc kubenswrapper[4651]: I1011 04:54:59.077085    4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bv8hd"]
Oct 11 04:54:59 crc kubenswrapper[4651]: I1011 04:54:59.598966    4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-bv8hd" podUID="fb3ff40a-c2bf-42d3-920b-b94751bed645" containerName="registry-server" containerID="cri-o://929cb6c0b1be435fdbce1a6a70658f2a6ff17353b360b50bf6c9d360d29ef5de" gracePeriod=2
Oct 11 04:54:59 crc kubenswrapper[4651]: I1011 04:54:59.765981    4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zqjmp"]
Oct 11 04:55:00 crc kubenswrapper[4651]: I1011 04:55:00.146221    4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5cw4f"
Oct 11 04:55:00 crc kubenswrapper[4651]: I1011 04:55:00.155470    4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bv8hd"
Oct 11 04:55:00 crc kubenswrapper[4651]: I1011 04:55:00.199535    4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5cw4f"
Oct 11 04:55:00 crc kubenswrapper[4651]: I1011 04:55:00.238809    4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb3ff40a-c2bf-42d3-920b-b94751bed645-catalog-content\") pod \"fb3ff40a-c2bf-42d3-920b-b94751bed645\" (UID: \"fb3ff40a-c2bf-42d3-920b-b94751bed645\") "
Oct 11 04:55:00 crc kubenswrapper[4651]: I1011 04:55:00.238982    4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g74tz\" (UniqueName: \"kubernetes.io/projected/fb3ff40a-c2bf-42d3-920b-b94751bed645-kube-api-access-g74tz\") pod \"fb3ff40a-c2bf-42d3-920b-b94751bed645\" (UID: \"fb3ff40a-c2bf-42d3-920b-b94751bed645\") "
Oct 11 04:55:00 crc kubenswrapper[4651]: I1011 04:55:00.239019    4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb3ff40a-c2bf-42d3-920b-b94751bed645-utilities\") pod \"fb3ff40a-c2bf-42d3-920b-b94751bed645\" (UID: \"fb3ff40a-c2bf-42d3-920b-b94751bed645\") "
Oct 11 04:55:00 crc kubenswrapper[4651]: I1011 04:55:00.240264    4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb3ff40a-c2bf-42d3-920b-b94751bed645-utilities" (OuterVolumeSpecName: "utilities") pod "fb3ff40a-c2bf-42d3-920b-b94751bed645" (UID: "fb3ff40a-c2bf-42d3-920b-b94751bed645"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 04:55:00 crc kubenswrapper[4651]: I1011 04:55:00.244840    4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb3ff40a-c2bf-42d3-920b-b94751bed645-kube-api-access-g74tz" (OuterVolumeSpecName: "kube-api-access-g74tz") pod "fb3ff40a-c2bf-42d3-920b-b94751bed645" (UID: "fb3ff40a-c2bf-42d3-920b-b94751bed645"). InnerVolumeSpecName "kube-api-access-g74tz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 04:55:00 crc kubenswrapper[4651]: I1011 04:55:00.279860    4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb3ff40a-c2bf-42d3-920b-b94751bed645-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fb3ff40a-c2bf-42d3-920b-b94751bed645" (UID: "fb3ff40a-c2bf-42d3-920b-b94751bed645"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 04:55:00 crc kubenswrapper[4651]: I1011 04:55:00.340695    4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g74tz\" (UniqueName: \"kubernetes.io/projected/fb3ff40a-c2bf-42d3-920b-b94751bed645-kube-api-access-g74tz\") on node \"crc\" DevicePath \"\""
Oct 11 04:55:00 crc kubenswrapper[4651]: I1011 04:55:00.340750    4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb3ff40a-c2bf-42d3-920b-b94751bed645-utilities\") on node \"crc\" DevicePath \"\""
Oct 11 04:55:00 crc kubenswrapper[4651]: I1011 04:55:00.340771    4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb3ff40a-c2bf-42d3-920b-b94751bed645-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 11 04:55:00 crc kubenswrapper[4651]: I1011 04:55:00.606089    4651 generic.go:334] "Generic (PLEG): container finished" podID="fb3ff40a-c2bf-42d3-920b-b94751bed645" containerID="929cb6c0b1be435fdbce1a6a70658f2a6ff17353b360b50bf6c9d360d29ef5de" exitCode=0
Oct 11 04:55:00 crc kubenswrapper[4651]: I1011 04:55:00.606168    4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bv8hd" event={"ID":"fb3ff40a-c2bf-42d3-920b-b94751bed645","Type":"ContainerDied","Data":"929cb6c0b1be435fdbce1a6a70658f2a6ff17353b360b50bf6c9d360d29ef5de"}
Oct 11 04:55:00 crc kubenswrapper[4651]: I1011 04:55:00.606265    4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bv8hd" event={"ID":"fb3ff40a-c2bf-42d3-920b-b94751bed645","Type":"ContainerDied","Data":"153d669a81ada5d1a8cc827685e7bb67edc17018c253b78e33fcb2a0deae0f97"}
Oct 11 04:55:00 crc kubenswrapper[4651]: I1011 04:55:00.606326    4651 scope.go:117] "RemoveContainer" containerID="929cb6c0b1be435fdbce1a6a70658f2a6ff17353b360b50bf6c9d360d29ef5de"
Oct 11 04:55:00 crc kubenswrapper[4651]: I1011 04:55:00.606465    4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bv8hd"
Oct 11 04:55:00 crc kubenswrapper[4651]: I1011 04:55:00.608173    4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-7d6h5" podUID="dd02099e-6a9d-425a-ac83-897f19a1007d" containerName="registry-server" containerID="cri-o://0c8c93d396ea747b0c698fd86e3e97b3e50ac50def289b9f1941f32d02fdff30" gracePeriod=2
Oct 11 04:55:00 crc kubenswrapper[4651]: I1011 04:55:00.626706    4651 scope.go:117] "RemoveContainer" containerID="a7f94050b37e1d12cd826cc9d7ece91b59677fc18b01b49895e0652f15baf346"
Oct 11 04:55:00 crc kubenswrapper[4651]: I1011 04:55:00.643678    4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bv8hd"]
Oct 11 04:55:00 crc kubenswrapper[4651]: I1011 04:55:00.645021    4651 scope.go:117] "RemoveContainer" containerID="20f812c2f87b0f7207eef434f0662b0e724a6bc91ca50b8c2b6054ea22b74e83"
Oct 11 04:55:00 crc kubenswrapper[4651]: I1011 04:55:00.649097    4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-bv8hd"]
Oct 11 04:55:00 crc kubenswrapper[4651]: I1011 04:55:00.664573    4651 scope.go:117] "RemoveContainer" containerID="929cb6c0b1be435fdbce1a6a70658f2a6ff17353b360b50bf6c9d360d29ef5de"
Oct 11 04:55:00 crc kubenswrapper[4651]: E1011 04:55:00.665206    4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"929cb6c0b1be435fdbce1a6a70658f2a6ff17353b360b50bf6c9d360d29ef5de\": container with ID starting with 929cb6c0b1be435fdbce1a6a70658f2a6ff17353b360b50bf6c9d360d29ef5de not found: ID does not exist" containerID="929cb6c0b1be435fdbce1a6a70658f2a6ff17353b360b50bf6c9d360d29ef5de"
Oct 11 04:55:00 crc kubenswrapper[4651]: I1011 04:55:00.665260    4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"929cb6c0b1be435fdbce1a6a70658f2a6ff17353b360b50bf6c9d360d29ef5de"} err="failed to get container status \"929cb6c0b1be435fdbce1a6a70658f2a6ff17353b360b50bf6c9d360d29ef5de\": rpc error: code = NotFound desc = could not find container \"929cb6c0b1be435fdbce1a6a70658f2a6ff17353b360b50bf6c9d360d29ef5de\": container with ID starting with 929cb6c0b1be435fdbce1a6a70658f2a6ff17353b360b50bf6c9d360d29ef5de not found: ID does not exist"
Oct 11 04:55:00 crc kubenswrapper[4651]: I1011 04:55:00.665327    4651 scope.go:117] "RemoveContainer" containerID="a7f94050b37e1d12cd826cc9d7ece91b59677fc18b01b49895e0652f15baf346"
Oct 11 04:55:00 crc kubenswrapper[4651]: E1011 04:55:00.665774    4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7f94050b37e1d12cd826cc9d7ece91b59677fc18b01b49895e0652f15baf346\": container with ID starting with a7f94050b37e1d12cd826cc9d7ece91b59677fc18b01b49895e0652f15baf346 not found: ID does not exist" containerID="a7f94050b37e1d12cd826cc9d7ece91b59677fc18b01b49895e0652f15baf346"
Oct 11 04:55:00 crc kubenswrapper[4651]: I1011 04:55:00.665856    4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7f94050b37e1d12cd826cc9d7ece91b59677fc18b01b49895e0652f15baf346"} err="failed to get container status \"a7f94050b37e1d12cd826cc9d7ece91b59677fc18b01b49895e0652f15baf346\": rpc error: code = NotFound desc = could not find container \"a7f94050b37e1d12cd826cc9d7ece91b59677fc18b01b49895e0652f15baf346\": container with ID starting with a7f94050b37e1d12cd826cc9d7ece91b59677fc18b01b49895e0652f15baf346 not found: ID does not exist"
Oct 11 04:55:00 crc kubenswrapper[4651]: I1011 04:55:00.665890    4651 scope.go:117] "RemoveContainer" containerID="20f812c2f87b0f7207eef434f0662b0e724a6bc91ca50b8c2b6054ea22b74e83"
Oct 11 04:55:00 crc kubenswrapper[4651]: E1011 04:55:00.666204    4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"20f812c2f87b0f7207eef434f0662b0e724a6bc91ca50b8c2b6054ea22b74e83\": container with ID starting with 20f812c2f87b0f7207eef434f0662b0e724a6bc91ca50b8c2b6054ea22b74e83 not found: ID does not exist" containerID="20f812c2f87b0f7207eef434f0662b0e724a6bc91ca50b8c2b6054ea22b74e83"
Oct 11 04:55:00 crc kubenswrapper[4651]: I1011 04:55:00.666227    4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"20f812c2f87b0f7207eef434f0662b0e724a6bc91ca50b8c2b6054ea22b74e83"} err="failed to get container status \"20f812c2f87b0f7207eef434f0662b0e724a6bc91ca50b8c2b6054ea22b74e83\": rpc error: code = NotFound desc = could not find container \"20f812c2f87b0f7207eef434f0662b0e724a6bc91ca50b8c2b6054ea22b74e83\": container with ID starting with 20f812c2f87b0f7207eef434f0662b0e724a6bc91ca50b8c2b6054ea22b74e83 not found: ID does not exist"
Oct 11 04:55:00 crc kubenswrapper[4651]: I1011 04:55:00.954662    4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7d6h5"
Oct 11 04:55:01 crc kubenswrapper[4651]: I1011 04:55:01.055135    4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd02099e-6a9d-425a-ac83-897f19a1007d-utilities\") pod \"dd02099e-6a9d-425a-ac83-897f19a1007d\" (UID: \"dd02099e-6a9d-425a-ac83-897f19a1007d\") "
Oct 11 04:55:01 crc kubenswrapper[4651]: I1011 04:55:01.055227    4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd02099e-6a9d-425a-ac83-897f19a1007d-catalog-content\") pod \"dd02099e-6a9d-425a-ac83-897f19a1007d\" (UID: \"dd02099e-6a9d-425a-ac83-897f19a1007d\") "
Oct 11 04:55:01 crc kubenswrapper[4651]: I1011 04:55:01.055251    4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnndv\" (UniqueName: \"kubernetes.io/projected/dd02099e-6a9d-425a-ac83-897f19a1007d-kube-api-access-rnndv\") pod \"dd02099e-6a9d-425a-ac83-897f19a1007d\" (UID: \"dd02099e-6a9d-425a-ac83-897f19a1007d\") "
Oct 11 04:55:01 crc kubenswrapper[4651]: I1011 04:55:01.056801    4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd02099e-6a9d-425a-ac83-897f19a1007d-utilities" (OuterVolumeSpecName: "utilities") pod "dd02099e-6a9d-425a-ac83-897f19a1007d" (UID: "dd02099e-6a9d-425a-ac83-897f19a1007d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 04:55:01 crc kubenswrapper[4651]: I1011 04:55:01.058086    4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd02099e-6a9d-425a-ac83-897f19a1007d-kube-api-access-rnndv" (OuterVolumeSpecName: "kube-api-access-rnndv") pod "dd02099e-6a9d-425a-ac83-897f19a1007d" (UID: "dd02099e-6a9d-425a-ac83-897f19a1007d"). InnerVolumeSpecName "kube-api-access-rnndv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 04:55:01 crc kubenswrapper[4651]: I1011 04:55:01.103519    4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd02099e-6a9d-425a-ac83-897f19a1007d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dd02099e-6a9d-425a-ac83-897f19a1007d" (UID: "dd02099e-6a9d-425a-ac83-897f19a1007d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 04:55:01 crc kubenswrapper[4651]: I1011 04:55:01.156239    4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd02099e-6a9d-425a-ac83-897f19a1007d-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 11 04:55:01 crc kubenswrapper[4651]: I1011 04:55:01.156989    4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnndv\" (UniqueName: \"kubernetes.io/projected/dd02099e-6a9d-425a-ac83-897f19a1007d-kube-api-access-rnndv\") on node \"crc\" DevicePath \"\""
Oct 11 04:55:01 crc kubenswrapper[4651]: I1011 04:55:01.157069    4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd02099e-6a9d-425a-ac83-897f19a1007d-utilities\") on node \"crc\" DevicePath \"\""
Oct 11 04:55:01 crc kubenswrapper[4651]: I1011 04:55:01.612684    4651 generic.go:334] "Generic (PLEG): container finished" podID="dd02099e-6a9d-425a-ac83-897f19a1007d" containerID="0c8c93d396ea747b0c698fd86e3e97b3e50ac50def289b9f1941f32d02fdff30" exitCode=0
Oct 11 04:55:01 crc kubenswrapper[4651]: I1011 04:55:01.612785    4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7d6h5"
Oct 11 04:55:01 crc kubenswrapper[4651]: I1011 04:55:01.613356    4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7d6h5" event={"ID":"dd02099e-6a9d-425a-ac83-897f19a1007d","Type":"ContainerDied","Data":"0c8c93d396ea747b0c698fd86e3e97b3e50ac50def289b9f1941f32d02fdff30"}
Oct 11 04:55:01 crc kubenswrapper[4651]: I1011 04:55:01.613404    4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7d6h5" event={"ID":"dd02099e-6a9d-425a-ac83-897f19a1007d","Type":"ContainerDied","Data":"4044e2137ee1086e952b483ffe0ef91b105650ff160800178e29b06ef7fe5e8f"}
Oct 11 04:55:01 crc kubenswrapper[4651]: I1011 04:55:01.613429    4651 scope.go:117] "RemoveContainer" containerID="0c8c93d396ea747b0c698fd86e3e97b3e50ac50def289b9f1941f32d02fdff30"
Oct 11 04:55:01 crc kubenswrapper[4651]: I1011 04:55:01.639526    4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7d6h5"]
Oct 11 04:55:01 crc kubenswrapper[4651]: I1011 04:55:01.640266    4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-7d6h5"]
Oct 11 04:55:01 crc kubenswrapper[4651]: I1011 04:55:01.641152    4651 scope.go:117] "RemoveContainer" containerID="49e7a60ae9774d726d6ef0e80c4ff6871f58731d1cde1c610d79a29628e13d23"
Oct 11 04:55:01 crc kubenswrapper[4651]: I1011 04:55:01.670936    4651 scope.go:117] "RemoveContainer" containerID="b4c156891aa54bf33596d476df49e8120124dd8d3766d4ae4de35f78db9415ab"
Oct 11 04:55:01 crc kubenswrapper[4651]: I1011 04:55:01.686008    4651 scope.go:117] "RemoveContainer" containerID="0c8c93d396ea747b0c698fd86e3e97b3e50ac50def289b9f1941f32d02fdff30"
Oct 11 04:55:01 crc kubenswrapper[4651]: E1011 04:55:01.686348    4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c8c93d396ea747b0c698fd86e3e97b3e50ac50def289b9f1941f32d02fdff30\": container with ID starting with 0c8c93d396ea747b0c698fd86e3e97b3e50ac50def289b9f1941f32d02fdff30 not found: ID does not exist" containerID="0c8c93d396ea747b0c698fd86e3e97b3e50ac50def289b9f1941f32d02fdff30"
Oct 11 04:55:01 crc kubenswrapper[4651]: I1011 04:55:01.686373    4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c8c93d396ea747b0c698fd86e3e97b3e50ac50def289b9f1941f32d02fdff30"} err="failed to get container status \"0c8c93d396ea747b0c698fd86e3e97b3e50ac50def289b9f1941f32d02fdff30\": rpc error: code = NotFound desc = could not find container \"0c8c93d396ea747b0c698fd86e3e97b3e50ac50def289b9f1941f32d02fdff30\": container with ID starting with 0c8c93d396ea747b0c698fd86e3e97b3e50ac50def289b9f1941f32d02fdff30 not found: ID does not exist"
Oct 11 04:55:01 crc kubenswrapper[4651]: I1011 04:55:01.686395    4651 scope.go:117] "RemoveContainer" containerID="49e7a60ae9774d726d6ef0e80c4ff6871f58731d1cde1c610d79a29628e13d23"
Oct 11 04:55:01 crc kubenswrapper[4651]: E1011 04:55:01.686619    4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49e7a60ae9774d726d6ef0e80c4ff6871f58731d1cde1c610d79a29628e13d23\": container with ID starting with 49e7a60ae9774d726d6ef0e80c4ff6871f58731d1cde1c610d79a29628e13d23 not found: ID does not exist" containerID="49e7a60ae9774d726d6ef0e80c4ff6871f58731d1cde1c610d79a29628e13d23"
Oct 11 04:55:01 crc kubenswrapper[4651]: I1011 04:55:01.686641    4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49e7a60ae9774d726d6ef0e80c4ff6871f58731d1cde1c610d79a29628e13d23"} err="failed to get container status \"49e7a60ae9774d726d6ef0e80c4ff6871f58731d1cde1c610d79a29628e13d23\": rpc error: code = NotFound desc = could not find container \"49e7a60ae9774d726d6ef0e80c4ff6871f58731d1cde1c610d79a29628e13d23\": container with ID starting with 49e7a60ae9774d726d6ef0e80c4ff6871f58731d1cde1c610d79a29628e13d23 not found: ID does not exist"
Oct 11 04:55:01 crc kubenswrapper[4651]: I1011 04:55:01.686655    4651 scope.go:117] "RemoveContainer" containerID="b4c156891aa54bf33596d476df49e8120124dd8d3766d4ae4de35f78db9415ab"
Oct 11 04:55:01 crc kubenswrapper[4651]: E1011 04:55:01.686844    4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4c156891aa54bf33596d476df49e8120124dd8d3766d4ae4de35f78db9415ab\": container with ID starting with b4c156891aa54bf33596d476df49e8120124dd8d3766d4ae4de35f78db9415ab not found: ID does not exist" containerID="b4c156891aa54bf33596d476df49e8120124dd8d3766d4ae4de35f78db9415ab"
Oct 11 04:55:01 crc kubenswrapper[4651]: I1011 04:55:01.686863    4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4c156891aa54bf33596d476df49e8120124dd8d3766d4ae4de35f78db9415ab"} err="failed to get container status \"b4c156891aa54bf33596d476df49e8120124dd8d3766d4ae4de35f78db9415ab\": rpc error: code = NotFound desc = could not find container \"b4c156891aa54bf33596d476df49e8120124dd8d3766d4ae4de35f78db9415ab\": container with ID starting with b4c156891aa54bf33596d476df49e8120124dd8d3766d4ae4de35f78db9415ab not found: ID does not exist"
Oct 11 04:55:01 crc kubenswrapper[4651]: I1011 04:55:01.876225    4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd02099e-6a9d-425a-ac83-897f19a1007d" path="/var/lib/kubelet/pods/dd02099e-6a9d-425a-ac83-897f19a1007d/volumes"
Oct 11 04:55:01 crc kubenswrapper[4651]: I1011 04:55:01.876793    4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb3ff40a-c2bf-42d3-920b-b94751bed645" path="/var/lib/kubelet/pods/fb3ff40a-c2bf-42d3-920b-b94751bed645/volumes"
Oct 11 04:55:03 crc kubenswrapper[4651]: I1011 04:55:03.474921    4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5cw4f"]
Oct 11 04:55:03 crc kubenswrapper[4651]: I1011 04:55:03.475135    4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5cw4f" podUID="aa37954c-dd94-44fc-86e6-8fb23c429af1" containerName="registry-server" containerID="cri-o://b3f88d0b3b78c5e004814d06f1fb6dc985804875e7b56ca493b9431b419eb607" gracePeriod=2
Oct 11 04:55:03 crc kubenswrapper[4651]: I1011 04:55:03.627538    4651 generic.go:334] "Generic (PLEG): container finished" podID="aa37954c-dd94-44fc-86e6-8fb23c429af1" containerID="b3f88d0b3b78c5e004814d06f1fb6dc985804875e7b56ca493b9431b419eb607" exitCode=0
Oct 11 04:55:03 crc kubenswrapper[4651]: I1011 04:55:03.627578    4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5cw4f" event={"ID":"aa37954c-dd94-44fc-86e6-8fb23c429af1","Type":"ContainerDied","Data":"b3f88d0b3b78c5e004814d06f1fb6dc985804875e7b56ca493b9431b419eb607"}
Oct 11 04:55:03 crc kubenswrapper[4651]: I1011 04:55:03.848768    4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5cw4f"
Oct 11 04:55:03 crc kubenswrapper[4651]: I1011 04:55:03.991170    4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l8npx\" (UniqueName: \"kubernetes.io/projected/aa37954c-dd94-44fc-86e6-8fb23c429af1-kube-api-access-l8npx\") pod \"aa37954c-dd94-44fc-86e6-8fb23c429af1\" (UID: \"aa37954c-dd94-44fc-86e6-8fb23c429af1\") "
Oct 11 04:55:03 crc kubenswrapper[4651]: I1011 04:55:03.991279    4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa37954c-dd94-44fc-86e6-8fb23c429af1-catalog-content\") pod \"aa37954c-dd94-44fc-86e6-8fb23c429af1\" (UID: \"aa37954c-dd94-44fc-86e6-8fb23c429af1\") "
Oct 11 04:55:03 crc kubenswrapper[4651]: I1011 04:55:03.991307    4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa37954c-dd94-44fc-86e6-8fb23c429af1-utilities\") pod \"aa37954c-dd94-44fc-86e6-8fb23c429af1\" (UID: \"aa37954c-dd94-44fc-86e6-8fb23c429af1\") "
Oct 11 04:55:03 crc kubenswrapper[4651]: I1011 04:55:03.992374    4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa37954c-dd94-44fc-86e6-8fb23c429af1-utilities" (OuterVolumeSpecName: "utilities") pod "aa37954c-dd94-44fc-86e6-8fb23c429af1" (UID: "aa37954c-dd94-44fc-86e6-8fb23c429af1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 04:55:03 crc kubenswrapper[4651]: I1011 04:55:03.998325    4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa37954c-dd94-44fc-86e6-8fb23c429af1-kube-api-access-l8npx" (OuterVolumeSpecName: "kube-api-access-l8npx") pod "aa37954c-dd94-44fc-86e6-8fb23c429af1" (UID: "aa37954c-dd94-44fc-86e6-8fb23c429af1"). InnerVolumeSpecName "kube-api-access-l8npx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 04:55:03 crc kubenswrapper[4651]: I1011 04:55:03.998385    4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa37954c-dd94-44fc-86e6-8fb23c429af1-utilities\") on node \"crc\" DevicePath \"\""
Oct 11 04:55:04 crc kubenswrapper[4651]: I1011 04:55:04.064372    4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa37954c-dd94-44fc-86e6-8fb23c429af1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "aa37954c-dd94-44fc-86e6-8fb23c429af1" (UID: "aa37954c-dd94-44fc-86e6-8fb23c429af1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 04:55:04 crc kubenswrapper[4651]: I1011 04:55:04.099781    4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa37954c-dd94-44fc-86e6-8fb23c429af1-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 11 04:55:04 crc kubenswrapper[4651]: I1011 04:55:04.099834    4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l8npx\" (UniqueName: \"kubernetes.io/projected/aa37954c-dd94-44fc-86e6-8fb23c429af1-kube-api-access-l8npx\") on node \"crc\" DevicePath \"\""
Oct 11 04:55:04 crc kubenswrapper[4651]: I1011 04:55:04.633976    4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5cw4f" event={"ID":"aa37954c-dd94-44fc-86e6-8fb23c429af1","Type":"ContainerDied","Data":"afa84b6a8ac2bee8921945efc894fc3f37ebc7ba463c2955c633b7d6a35dcf9e"}
Oct 11 04:55:04 crc kubenswrapper[4651]: I1011 04:55:04.634029    4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5cw4f"
Oct 11 04:55:04 crc kubenswrapper[4651]: I1011 04:55:04.634047    4651 scope.go:117] "RemoveContainer" containerID="b3f88d0b3b78c5e004814d06f1fb6dc985804875e7b56ca493b9431b419eb607"
Oct 11 04:55:04 crc kubenswrapper[4651]: I1011 04:55:04.661785    4651 scope.go:117] "RemoveContainer" containerID="19a237e49261c4c890b74d23d94ea6c8cb6067418203fa98eca4975fcef10195"
Oct 11 04:55:04 crc kubenswrapper[4651]: I1011 04:55:04.664092    4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5cw4f"]
Oct 11 04:55:04 crc kubenswrapper[4651]: I1011 04:55:04.666816    4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5cw4f"]
Oct 11 04:55:04 crc kubenswrapper[4651]: I1011 04:55:04.699680    4651 scope.go:117] "RemoveContainer" containerID="194b8b052443fb4027a49077094ad9abc23b6de22400e4571828348c5f09099a"
Oct 11 04:55:05 crc kubenswrapper[4651]: I1011 04:55:05.881996    4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa37954c-dd94-44fc-86e6-8fb23c429af1" path="/var/lib/kubelet/pods/aa37954c-dd94-44fc-86e6-8fb23c429af1/volumes"
Oct 11 04:55:24 crc kubenswrapper[4651]: I1011 04:55:24.804782    4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" podUID="107de3f1-b5a8-41e4-bb3b-a34e4e916390" containerName="oauth-openshift" containerID="cri-o://fd951d9b62c7c02273d67aa0f4781137f840877fede418b2fdcd4ccb35e554ea" gracePeriod=15
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.184886    4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp"
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.241358    4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-84cc499644-xqjlq"]
Oct 11 04:55:25 crc kubenswrapper[4651]: E1011 04:55:25.241553    4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb3ff40a-c2bf-42d3-920b-b94751bed645" containerName="extract-utilities"
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.241564    4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb3ff40a-c2bf-42d3-920b-b94751bed645" containerName="extract-utilities"
Oct 11 04:55:25 crc kubenswrapper[4651]: E1011 04:55:25.241575    4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa37954c-dd94-44fc-86e6-8fb23c429af1" containerName="registry-server"
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.241581    4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa37954c-dd94-44fc-86e6-8fb23c429af1" containerName="registry-server"
Oct 11 04:55:25 crc kubenswrapper[4651]: E1011 04:55:25.241593    4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd3f200d-51eb-4758-b64a-10e9c8e36b65" containerName="extract-utilities"
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.241599    4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd3f200d-51eb-4758-b64a-10e9c8e36b65" containerName="extract-utilities"
Oct 11 04:55:25 crc kubenswrapper[4651]: E1011 04:55:25.241612    4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd3f200d-51eb-4758-b64a-10e9c8e36b65" containerName="registry-server"
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.241617    4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd3f200d-51eb-4758-b64a-10e9c8e36b65" containerName="registry-server"
Oct 11 04:55:25 crc kubenswrapper[4651]: E1011 04:55:25.241625    4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb3ff40a-c2bf-42d3-920b-b94751bed645" containerName="extract-content"
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.241632    4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb3ff40a-c2bf-42d3-920b-b94751bed645" containerName="extract-content"
Oct 11 04:55:25 crc kubenswrapper[4651]: E1011 04:55:25.241640    4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd02099e-6a9d-425a-ac83-897f19a1007d" containerName="extract-content"
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.241646    4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd02099e-6a9d-425a-ac83-897f19a1007d" containerName="extract-content"
Oct 11 04:55:25 crc kubenswrapper[4651]: E1011 04:55:25.241653    4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd02099e-6a9d-425a-ac83-897f19a1007d" containerName="extract-utilities"
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.241658    4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd02099e-6a9d-425a-ac83-897f19a1007d" containerName="extract-utilities"
Oct 11 04:55:25 crc kubenswrapper[4651]: E1011 04:55:25.241668    4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="107de3f1-b5a8-41e4-bb3b-a34e4e916390" containerName="oauth-openshift"
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.241673    4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="107de3f1-b5a8-41e4-bb3b-a34e4e916390" containerName="oauth-openshift"
Oct 11 04:55:25 crc kubenswrapper[4651]: E1011 04:55:25.241680    4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa37954c-dd94-44fc-86e6-8fb23c429af1" containerName="extract-content"
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.241686    4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa37954c-dd94-44fc-86e6-8fb23c429af1" containerName="extract-content"
Oct 11 04:55:25 crc kubenswrapper[4651]: E1011 04:55:25.241695    4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd02099e-6a9d-425a-ac83-897f19a1007d" containerName="registry-server"
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.241701    4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd02099e-6a9d-425a-ac83-897f19a1007d" containerName="registry-server"
Oct 11 04:55:25 crc kubenswrapper[4651]: E1011 04:55:25.241717    4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb3ff40a-c2bf-42d3-920b-b94751bed645" containerName="registry-server"
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.241723    4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb3ff40a-c2bf-42d3-920b-b94751bed645" containerName="registry-server"
Oct 11 04:55:25 crc kubenswrapper[4651]: E1011 04:55:25.241731    4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f02eb5ea-3d54-4eb5-b302-5d1a92d44921" containerName="pruner"
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.241736    4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="f02eb5ea-3d54-4eb5-b302-5d1a92d44921" containerName="pruner"
Oct 11 04:55:25 crc kubenswrapper[4651]: E1011 04:55:25.241750    4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd3f200d-51eb-4758-b64a-10e9c8e36b65" containerName="extract-content"
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.241755    4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd3f200d-51eb-4758-b64a-10e9c8e36b65" containerName="extract-content"
Oct 11 04:55:25 crc kubenswrapper[4651]: E1011 04:55:25.241764    4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa37954c-dd94-44fc-86e6-8fb23c429af1" containerName="extract-utilities"
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.241770    4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa37954c-dd94-44fc-86e6-8fb23c429af1" containerName="extract-utilities"
Oct 11 04:55:25 crc kubenswrapper[4651]: E1011 04:55:25.241778    4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9febfd36-febe-430d-9f97-9e3411b7155d" containerName="pruner"
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.241784    4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="9febfd36-febe-430d-9f97-9e3411b7155d" containerName="pruner"
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.241891    4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa37954c-dd94-44fc-86e6-8fb23c429af1" containerName="registry-server"
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.241906    4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd02099e-6a9d-425a-ac83-897f19a1007d" containerName="registry-server"
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.241917    4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb3ff40a-c2bf-42d3-920b-b94751bed645" containerName="registry-server"
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.241925    4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="f02eb5ea-3d54-4eb5-b302-5d1a92d44921" containerName="pruner"
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.241931    4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd3f200d-51eb-4758-b64a-10e9c8e36b65" containerName="registry-server"
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.241938    4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="107de3f1-b5a8-41e4-bb3b-a34e4e916390" containerName="oauth-openshift"
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.241945    4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="9febfd36-febe-430d-9f97-9e3411b7155d" containerName="pruner"
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.242312    4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq"
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.246471    4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-84cc499644-xqjlq"]
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.303099    4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-user-template-login\") pod \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") "
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.303150    4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/107de3f1-b5a8-41e4-bb3b-a34e4e916390-audit-policies\") pod \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") "
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.303178    4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-cliconfig\") pod \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") "
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.303200    4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-user-template-error\") pod \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") "
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.303223    4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-session\") pod \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") "
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.303243    4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-user-idp-0-file-data\") pod \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") "
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.303375    4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/107de3f1-b5a8-41e4-bb3b-a34e4e916390-audit-dir\") pod \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") "
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.303400    4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-ocp-branding-template\") pod \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") "
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.303415    4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-trusted-ca-bundle\") pod \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") "
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.303452    4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wppnl\" (UniqueName: \"kubernetes.io/projected/107de3f1-b5a8-41e4-bb3b-a34e4e916390-kube-api-access-wppnl\") pod \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") "
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.303474    4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-router-certs\") pod \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") "
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.303497    4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-service-ca\") pod \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") "
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.303540    4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-serving-cert\") pod \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") "
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.303565    4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-user-template-provider-selection\") pod \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\" (UID: \"107de3f1-b5a8-41e4-bb3b-a34e4e916390\") "
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.303689    4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/107de3f1-b5a8-41e4-bb3b-a34e4e916390-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "107de3f1-b5a8-41e4-bb3b-a34e4e916390" (UID: "107de3f1-b5a8-41e4-bb3b-a34e4e916390"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.304426    4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "107de3f1-b5a8-41e4-bb3b-a34e4e916390" (UID: "107de3f1-b5a8-41e4-bb3b-a34e4e916390"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.304786    4651 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/107de3f1-b5a8-41e4-bb3b-a34e4e916390-audit-dir\") on node \"crc\" DevicePath \"\""
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.304802    4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "107de3f1-b5a8-41e4-bb3b-a34e4e916390" (UID: "107de3f1-b5a8-41e4-bb3b-a34e4e916390"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.304850    4651 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\""
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.304903    4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/107de3f1-b5a8-41e4-bb3b-a34e4e916390-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "107de3f1-b5a8-41e4-bb3b-a34e4e916390" (UID: "107de3f1-b5a8-41e4-bb3b-a34e4e916390"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.305414    4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "107de3f1-b5a8-41e4-bb3b-a34e4e916390" (UID: "107de3f1-b5a8-41e4-bb3b-a34e4e916390"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.309854    4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "107de3f1-b5a8-41e4-bb3b-a34e4e916390" (UID: "107de3f1-b5a8-41e4-bb3b-a34e4e916390"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.310382    4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "107de3f1-b5a8-41e4-bb3b-a34e4e916390" (UID: "107de3f1-b5a8-41e4-bb3b-a34e4e916390"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.310571    4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "107de3f1-b5a8-41e4-bb3b-a34e4e916390" (UID: "107de3f1-b5a8-41e4-bb3b-a34e4e916390"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection".
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.310782 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/107de3f1-b5a8-41e4-bb3b-a34e4e916390-kube-api-access-wppnl" (OuterVolumeSpecName: "kube-api-access-wppnl") pod "107de3f1-b5a8-41e4-bb3b-a34e4e916390" (UID: "107de3f1-b5a8-41e4-bb3b-a34e4e916390"). InnerVolumeSpecName "kube-api-access-wppnl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.311243 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "107de3f1-b5a8-41e4-bb3b-a34e4e916390" (UID: "107de3f1-b5a8-41e4-bb3b-a34e4e916390"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.311539 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "107de3f1-b5a8-41e4-bb3b-a34e4e916390" (UID: "107de3f1-b5a8-41e4-bb3b-a34e4e916390"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.314384 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "107de3f1-b5a8-41e4-bb3b-a34e4e916390" (UID: "107de3f1-b5a8-41e4-bb3b-a34e4e916390"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.315559 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "107de3f1-b5a8-41e4-bb3b-a34e4e916390" (UID: "107de3f1-b5a8-41e4-bb3b-a34e4e916390"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.316217 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "107de3f1-b5a8-41e4-bb3b-a34e4e916390" (UID: "107de3f1-b5a8-41e4-bb3b-a34e4e916390"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.406011 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7ee6291a-dd71-4993-b8ff-806905f8548a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.406383 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7ee6291a-dd71-4993-b8ff-806905f8548a-v4-0-config-system-session\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.406411 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7ee6291a-dd71-4993-b8ff-806905f8548a-v4-0-config-user-template-login\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.406446 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7ee6291a-dd71-4993-b8ff-806905f8548a-v4-0-config-system-router-certs\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.406471 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7ee6291a-dd71-4993-b8ff-806905f8548a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.406503 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7ee6291a-dd71-4993-b8ff-806905f8548a-v4-0-config-user-template-error\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.406530 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7ee6291a-dd71-4993-b8ff-806905f8548a-v4-0-config-system-service-ca\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.406555 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqpvr\" (UniqueName: 
\"kubernetes.io/projected/7ee6291a-dd71-4993-b8ff-806905f8548a-kube-api-access-mqpvr\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.406587 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7ee6291a-dd71-4993-b8ff-806905f8548a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.406622 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7ee6291a-dd71-4993-b8ff-806905f8548a-audit-policies\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.406647 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7ee6291a-dd71-4993-b8ff-806905f8548a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.406673 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7ee6291a-dd71-4993-b8ff-806905f8548a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.406710 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7ee6291a-dd71-4993-b8ff-806905f8548a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.406737 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7ee6291a-dd71-4993-b8ff-806905f8548a-audit-dir\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.406782 4651 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.406798 4651 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/107de3f1-b5a8-41e4-bb3b-a34e4e916390-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 
11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.406812 4651 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.406842 4651 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.406854 4651 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.406866 4651 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.406879 4651 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.406892 4651 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.406905 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wppnl\" (UniqueName: \"kubernetes.io/projected/107de3f1-b5a8-41e4-bb3b-a34e4e916390-kube-api-access-wppnl\") on node \"crc\" DevicePath \"\"" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.406917 4651 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.406930 4651 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.406944 4651 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/107de3f1-b5a8-41e4-bb3b-a34e4e916390-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.507696 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7ee6291a-dd71-4993-b8ff-806905f8548a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.507762 4651 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7ee6291a-dd71-4993-b8ff-806905f8548a-audit-policies\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.507791 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7ee6291a-dd71-4993-b8ff-806905f8548a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.507836 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7ee6291a-dd71-4993-b8ff-806905f8548a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.507870 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7ee6291a-dd71-4993-b8ff-806905f8548a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.507894 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7ee6291a-dd71-4993-b8ff-806905f8548a-audit-dir\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.507920 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7ee6291a-dd71-4993-b8ff-806905f8548a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.507955 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7ee6291a-dd71-4993-b8ff-806905f8548a-v4-0-config-system-session\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.507979 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7ee6291a-dd71-4993-b8ff-806905f8548a-v4-0-config-user-template-login\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.508018 4651 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7ee6291a-dd71-4993-b8ff-806905f8548a-v4-0-config-system-router-certs\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.508044 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7ee6291a-dd71-4993-b8ff-806905f8548a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.508079 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7ee6291a-dd71-4993-b8ff-806905f8548a-v4-0-config-user-template-error\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.508109 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7ee6291a-dd71-4993-b8ff-806905f8548a-v4-0-config-system-service-ca\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.508138 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqpvr\" (UniqueName: \"kubernetes.io/projected/7ee6291a-dd71-4993-b8ff-806905f8548a-kube-api-access-mqpvr\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.508693 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7ee6291a-dd71-4993-b8ff-806905f8548a-audit-dir\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.509097 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7ee6291a-dd71-4993-b8ff-806905f8548a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.509155 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7ee6291a-dd71-4993-b8ff-806905f8548a-audit-policies\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.509577 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/7ee6291a-dd71-4993-b8ff-806905f8548a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.509602 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7ee6291a-dd71-4993-b8ff-806905f8548a-v4-0-config-system-service-ca\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.511722 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7ee6291a-dd71-4993-b8ff-806905f8548a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.511847 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7ee6291a-dd71-4993-b8ff-806905f8548a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.511936 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7ee6291a-dd71-4993-b8ff-806905f8548a-v4-0-config-system-session\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.512521 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7ee6291a-dd71-4993-b8ff-806905f8548a-v4-0-config-system-router-certs\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.512793 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7ee6291a-dd71-4993-b8ff-806905f8548a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.514280 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7ee6291a-dd71-4993-b8ff-806905f8548a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.518440 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: 
\"kubernetes.io/secret/7ee6291a-dd71-4993-b8ff-806905f8548a-v4-0-config-user-template-error\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.518691 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7ee6291a-dd71-4993-b8ff-806905f8548a-v4-0-config-user-template-login\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.530491 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mqpvr\" (UniqueName: \"kubernetes.io/projected/7ee6291a-dd71-4993-b8ff-806905f8548a-kube-api-access-mqpvr\") pod \"oauth-openshift-84cc499644-xqjlq\" (UID: \"7ee6291a-dd71-4993-b8ff-806905f8548a\") " pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.567507 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.750114 4651 generic.go:334] "Generic (PLEG): container finished" podID="107de3f1-b5a8-41e4-bb3b-a34e4e916390" containerID="fd951d9b62c7c02273d67aa0f4781137f840877fede418b2fdcd4ccb35e554ea" exitCode=0 Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.750156 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.750164 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" event={"ID":"107de3f1-b5a8-41e4-bb3b-a34e4e916390","Type":"ContainerDied","Data":"fd951d9b62c7c02273d67aa0f4781137f840877fede418b2fdcd4ccb35e554ea"} Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.750189 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-zqjmp" event={"ID":"107de3f1-b5a8-41e4-bb3b-a34e4e916390","Type":"ContainerDied","Data":"23094e7600c7d63ec080fb45512c04cf3c76f659db2de174e3d425c515f765be"} Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.750203 4651 scope.go:117] "RemoveContainer" containerID="fd951d9b62c7c02273d67aa0f4781137f840877fede418b2fdcd4ccb35e554ea" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.775603 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zqjmp"] Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.778213 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zqjmp"] Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.781624 4651 scope.go:117] "RemoveContainer" containerID="fd951d9b62c7c02273d67aa0f4781137f840877fede418b2fdcd4ccb35e554ea" Oct 11 04:55:25 crc kubenswrapper[4651]: E1011 04:55:25.782146 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd951d9b62c7c02273d67aa0f4781137f840877fede418b2fdcd4ccb35e554ea\": container with ID starting with fd951d9b62c7c02273d67aa0f4781137f840877fede418b2fdcd4ccb35e554ea not found: ID does not exist" 
containerID="fd951d9b62c7c02273d67aa0f4781137f840877fede418b2fdcd4ccb35e554ea" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.782173 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd951d9b62c7c02273d67aa0f4781137f840877fede418b2fdcd4ccb35e554ea"} err="failed to get container status \"fd951d9b62c7c02273d67aa0f4781137f840877fede418b2fdcd4ccb35e554ea\": rpc error: code = NotFound desc = could not find container \"fd951d9b62c7c02273d67aa0f4781137f840877fede418b2fdcd4ccb35e554ea\": container with ID starting with fd951d9b62c7c02273d67aa0f4781137f840877fede418b2fdcd4ccb35e554ea not found: ID does not exist" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.877016 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="107de3f1-b5a8-41e4-bb3b-a34e4e916390" path="/var/lib/kubelet/pods/107de3f1-b5a8-41e4-bb3b-a34e4e916390/volumes" Oct 11 04:55:25 crc kubenswrapper[4651]: I1011 04:55:25.946521 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-84cc499644-xqjlq"] Oct 11 04:55:26 crc kubenswrapper[4651]: I1011 04:55:26.761008 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" event={"ID":"7ee6291a-dd71-4993-b8ff-806905f8548a","Type":"ContainerStarted","Data":"2d23d6841782d73277b73dfa33b267733449fed07afee466eb4485ebafd75efc"} Oct 11 04:55:26 crc kubenswrapper[4651]: I1011 04:55:26.761058 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" event={"ID":"7ee6291a-dd71-4993-b8ff-806905f8548a","Type":"ContainerStarted","Data":"e3f88f2350f230aee84f2ab38da0aacf8c9859f9d5f80c6aac9f949044a74ac0"} Oct 11 04:55:26 crc kubenswrapper[4651]: I1011 04:55:26.761408 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:26 crc kubenswrapper[4651]: I1011 04:55:26.769524 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" Oct 11 04:55:26 crc kubenswrapper[4651]: I1011 04:55:26.777467 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-84cc499644-xqjlq" podStartSLOduration=27.777442437 podStartE2EDuration="27.777442437s" podCreationTimestamp="2025-10-11 04:54:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:55:26.77709199 +0000 UTC m=+247.673324826" watchObservedRunningTime="2025-10-11 04:55:26.777442437 +0000 UTC m=+247.673675293" Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.305587 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vtcqh"] Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.306299 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vtcqh" podUID="ad64884c-e97d-4dc2-8f86-a44c537f7068" containerName="registry-server" containerID="cri-o://32308b968e1b4ada23facceee4f34e59251a6ab99097a2c817b18b2f96d874c5" gracePeriod=30 Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.331272 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6gt5n"] Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.331551 4651 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-6gt5n" podUID="24573bf5-c576-4e14-b3ac-f33e6ca99af3" containerName="registry-server" containerID="cri-o://7e88c131e0f34e2ca13b70b0058e9681ced1639f903478939165a46abc2ac4e6" gracePeriod=30 Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.349992 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-cd92z"] Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.350517 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-cd92z" podUID="a25ac582-d0a6-4bd7-a9c9-dbed70086212" containerName="marketplace-operator" containerID="cri-o://1edf5f7f4b453b2ff313cb8ab6e5910898f8a8a011c5170d79f1f578e458516e" gracePeriod=30 Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.354180 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-g9dtw"] Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.354434 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-g9dtw" podUID="b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c" containerName="registry-server" containerID="cri-o://db3802d8ac52a3253e593ac9f76da7ab912261b2ef7dc4ef872f6072420b8c03" gracePeriod=30 Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.358296 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-c7pll"] Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.359183 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-c7pll" Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.362252 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-drmcl"] Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.362515 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-drmcl" podUID="ac326965-3277-44c2-bd27-773586999e23" containerName="registry-server" containerID="cri-o://1d45773b7318f593d0a25680d4464831ab23d910dd4adceaa6f67558de857d35" gracePeriod=30 Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.367018 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-c7pll"] Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.542960 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/112a830a-ce46-4e30-8d29-10f0605944d9-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-c7pll\" (UID: \"112a830a-ce46-4e30-8d29-10f0605944d9\") " pod="openshift-marketplace/marketplace-operator-79b997595-c7pll" Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.543043 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p7s8m\" (UniqueName: \"kubernetes.io/projected/112a830a-ce46-4e30-8d29-10f0605944d9-kube-api-access-p7s8m\") pod \"marketplace-operator-79b997595-c7pll\" (UID: \"112a830a-ce46-4e30-8d29-10f0605944d9\") " pod="openshift-marketplace/marketplace-operator-79b997595-c7pll" Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.543088 4651 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/112a830a-ce46-4e30-8d29-10f0605944d9-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-c7pll\" (UID: \"112a830a-ce46-4e30-8d29-10f0605944d9\") " pod="openshift-marketplace/marketplace-operator-79b997595-c7pll" Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.644251 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/112a830a-ce46-4e30-8d29-10f0605944d9-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-c7pll\" (UID: \"112a830a-ce46-4e30-8d29-10f0605944d9\") " pod="openshift-marketplace/marketplace-operator-79b997595-c7pll" Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.644312 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/112a830a-ce46-4e30-8d29-10f0605944d9-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-c7pll\" (UID: \"112a830a-ce46-4e30-8d29-10f0605944d9\") " pod="openshift-marketplace/marketplace-operator-79b997595-c7pll" Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.644353 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p7s8m\" (UniqueName: \"kubernetes.io/projected/112a830a-ce46-4e30-8d29-10f0605944d9-kube-api-access-p7s8m\") pod \"marketplace-operator-79b997595-c7pll\" (UID: \"112a830a-ce46-4e30-8d29-10f0605944d9\") " pod="openshift-marketplace/marketplace-operator-79b997595-c7pll" Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.645438 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/112a830a-ce46-4e30-8d29-10f0605944d9-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-c7pll\" (UID: \"112a830a-ce46-4e30-8d29-10f0605944d9\") " pod="openshift-marketplace/marketplace-operator-79b997595-c7pll" Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.650386 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/112a830a-ce46-4e30-8d29-10f0605944d9-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-c7pll\" (UID: \"112a830a-ce46-4e30-8d29-10f0605944d9\") " pod="openshift-marketplace/marketplace-operator-79b997595-c7pll" Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.659402 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p7s8m\" (UniqueName: \"kubernetes.io/projected/112a830a-ce46-4e30-8d29-10f0605944d9-kube-api-access-p7s8m\") pod \"marketplace-operator-79b997595-c7pll\" (UID: \"112a830a-ce46-4e30-8d29-10f0605944d9\") " pod="openshift-marketplace/marketplace-operator-79b997595-c7pll" Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.751674 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-c7pll" Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.755543 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vtcqh" Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.767263 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6gt5n" Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.869728 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-cd92z" Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.870515 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-drmcl" Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.875026 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g9dtw" Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.947277 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24573bf5-c576-4e14-b3ac-f33e6ca99af3-catalog-content\") pod \"24573bf5-c576-4e14-b3ac-f33e6ca99af3\" (UID: \"24573bf5-c576-4e14-b3ac-f33e6ca99af3\") " Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.947375 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad64884c-e97d-4dc2-8f86-a44c537f7068-utilities\") pod \"ad64884c-e97d-4dc2-8f86-a44c537f7068\" (UID: \"ad64884c-e97d-4dc2-8f86-a44c537f7068\") " Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.947395 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24573bf5-c576-4e14-b3ac-f33e6ca99af3-utilities\") pod \"24573bf5-c576-4e14-b3ac-f33e6ca99af3\" (UID: \"24573bf5-c576-4e14-b3ac-f33e6ca99af3\") " Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.947436 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad64884c-e97d-4dc2-8f86-a44c537f7068-catalog-content\") pod \"ad64884c-e97d-4dc2-8f86-a44c537f7068\" (UID: \"ad64884c-e97d-4dc2-8f86-a44c537f7068\") " Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.947489 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-52sjt\" (UniqueName: \"kubernetes.io/projected/ad64884c-e97d-4dc2-8f86-a44c537f7068-kube-api-access-52sjt\") pod \"ad64884c-e97d-4dc2-8f86-a44c537f7068\" (UID: \"ad64884c-e97d-4dc2-8f86-a44c537f7068\") " Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.947510 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-spgvj\" (UniqueName: \"kubernetes.io/projected/24573bf5-c576-4e14-b3ac-f33e6ca99af3-kube-api-access-spgvj\") pod \"24573bf5-c576-4e14-b3ac-f33e6ca99af3\" (UID: \"24573bf5-c576-4e14-b3ac-f33e6ca99af3\") " Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.948658 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/24573bf5-c576-4e14-b3ac-f33e6ca99af3-utilities" (OuterVolumeSpecName: "utilities") pod "24573bf5-c576-4e14-b3ac-f33e6ca99af3" (UID: "24573bf5-c576-4e14-b3ac-f33e6ca99af3"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.949081 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad64884c-e97d-4dc2-8f86-a44c537f7068-utilities" (OuterVolumeSpecName: "utilities") pod "ad64884c-e97d-4dc2-8f86-a44c537f7068" (UID: "ad64884c-e97d-4dc2-8f86-a44c537f7068"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.951410 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad64884c-e97d-4dc2-8f86-a44c537f7068-kube-api-access-52sjt" (OuterVolumeSpecName: "kube-api-access-52sjt") pod "ad64884c-e97d-4dc2-8f86-a44c537f7068" (UID: "ad64884c-e97d-4dc2-8f86-a44c537f7068"). InnerVolumeSpecName "kube-api-access-52sjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.951545 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24573bf5-c576-4e14-b3ac-f33e6ca99af3-kube-api-access-spgvj" (OuterVolumeSpecName: "kube-api-access-spgvj") pod "24573bf5-c576-4e14-b3ac-f33e6ca99af3" (UID: "24573bf5-c576-4e14-b3ac-f33e6ca99af3"). InnerVolumeSpecName "kube-api-access-spgvj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.964256 4651 generic.go:334] "Generic (PLEG): container finished" podID="ad64884c-e97d-4dc2-8f86-a44c537f7068" containerID="32308b968e1b4ada23facceee4f34e59251a6ab99097a2c817b18b2f96d874c5" exitCode=0 Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.964313 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vtcqh" event={"ID":"ad64884c-e97d-4dc2-8f86-a44c537f7068","Type":"ContainerDied","Data":"32308b968e1b4ada23facceee4f34e59251a6ab99097a2c817b18b2f96d874c5"} Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.964338 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vtcqh" event={"ID":"ad64884c-e97d-4dc2-8f86-a44c537f7068","Type":"ContainerDied","Data":"755ac326c480dbfff8005d26f64ee1f01fad12a3bb64333a6e02077ba553f067"} Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.964353 4651 scope.go:117] "RemoveContainer" containerID="32308b968e1b4ada23facceee4f34e59251a6ab99097a2c817b18b2f96d874c5" Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.964455 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vtcqh" Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.973300 4651 generic.go:334] "Generic (PLEG): container finished" podID="b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c" containerID="db3802d8ac52a3253e593ac9f76da7ab912261b2ef7dc4ef872f6072420b8c03" exitCode=0 Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.973376 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g9dtw" event={"ID":"b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c","Type":"ContainerDied","Data":"db3802d8ac52a3253e593ac9f76da7ab912261b2ef7dc4ef872f6072420b8c03"} Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.973400 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g9dtw" event={"ID":"b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c","Type":"ContainerDied","Data":"25863fc04b9afcdccf244ecc5619709e1eff7d454370eec9ad19898afced76c1"} Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.973474 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g9dtw" Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.977635 4651 generic.go:334] "Generic (PLEG): container finished" podID="24573bf5-c576-4e14-b3ac-f33e6ca99af3" containerID="7e88c131e0f34e2ca13b70b0058e9681ced1639f903478939165a46abc2ac4e6" exitCode=0 Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.977680 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6gt5n" event={"ID":"24573bf5-c576-4e14-b3ac-f33e6ca99af3","Type":"ContainerDied","Data":"7e88c131e0f34e2ca13b70b0058e9681ced1639f903478939165a46abc2ac4e6"} Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.977698 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6gt5n" event={"ID":"24573bf5-c576-4e14-b3ac-f33e6ca99af3","Type":"ContainerDied","Data":"76fa2e7548c10b5504dbb316b86be8c9354fb7e138be4d809a0bffe89c5d0b64"} Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.978284 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6gt5n" Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.992713 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-c7pll"] Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.993005 4651 generic.go:334] "Generic (PLEG): container finished" podID="ac326965-3277-44c2-bd27-773586999e23" containerID="1d45773b7318f593d0a25680d4464831ab23d910dd4adceaa6f67558de857d35" exitCode=0 Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.993196 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-drmcl"
Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.993356 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-drmcl" event={"ID":"ac326965-3277-44c2-bd27-773586999e23","Type":"ContainerDied","Data":"1d45773b7318f593d0a25680d4464831ab23d910dd4adceaa6f67558de857d35"}
Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.993407 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-drmcl" event={"ID":"ac326965-3277-44c2-bd27-773586999e23","Type":"ContainerDied","Data":"454c3d927fbdfb6dad62e6141067385a7f807195b23c4c262365a88b5cfb6bbf"}
Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.995982 4651 generic.go:334] "Generic (PLEG): container finished" podID="a25ac582-d0a6-4bd7-a9c9-dbed70086212" containerID="1edf5f7f4b453b2ff313cb8ab6e5910898f8a8a011c5170d79f1f578e458516e" exitCode=0
Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.996026 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-cd92z" event={"ID":"a25ac582-d0a6-4bd7-a9c9-dbed70086212","Type":"ContainerDied","Data":"1edf5f7f4b453b2ff313cb8ab6e5910898f8a8a011c5170d79f1f578e458516e"}
Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.996050 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-cd92z" event={"ID":"a25ac582-d0a6-4bd7-a9c9-dbed70086212","Type":"ContainerDied","Data":"9968b94eaba2f738b617766cfc9bae1b86327fdc22fa484823748347655cdb24"}
Oct 11 04:55:57 crc kubenswrapper[4651]: I1011 04:55:57.996096 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-cd92z"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.005623 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad64884c-e97d-4dc2-8f86-a44c537f7068-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ad64884c-e97d-4dc2-8f86-a44c537f7068" (UID: "ad64884c-e97d-4dc2-8f86-a44c537f7068"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.008404 4651 scope.go:117] "RemoveContainer" containerID="bfbbddbac19ada18a351a881fb377aa4fb5fd7fdb8a6befc81f103fb1013585c"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.022144 4651 scope.go:117] "RemoveContainer" containerID="8deb186874738aed7467d65fe7c04b36559ac552546fe6e7458ca26340b5f490"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.025002 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/24573bf5-c576-4e14-b3ac-f33e6ca99af3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "24573bf5-c576-4e14-b3ac-f33e6ca99af3" (UID: "24573bf5-c576-4e14-b3ac-f33e6ca99af3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.048837 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c-utilities\") pod \"b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c\" (UID: \"b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c\") "
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.048929 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c-catalog-content\") pod \"b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c\" (UID: \"b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c\") "
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.048953 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cwlps\" (UniqueName: \"kubernetes.io/projected/a25ac582-d0a6-4bd7-a9c9-dbed70086212-kube-api-access-cwlps\") pod \"a25ac582-d0a6-4bd7-a9c9-dbed70086212\" (UID: \"a25ac582-d0a6-4bd7-a9c9-dbed70086212\") "
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.048999 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-64r9s\" (UniqueName: \"kubernetes.io/projected/b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c-kube-api-access-64r9s\") pod \"b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c\" (UID: \"b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c\") "
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.049039 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a25ac582-d0a6-4bd7-a9c9-dbed70086212-marketplace-operator-metrics\") pod \"a25ac582-d0a6-4bd7-a9c9-dbed70086212\" (UID: \"a25ac582-d0a6-4bd7-a9c9-dbed70086212\") "
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.049079 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a25ac582-d0a6-4bd7-a9c9-dbed70086212-marketplace-trusted-ca\") pod \"a25ac582-d0a6-4bd7-a9c9-dbed70086212\" (UID: \"a25ac582-d0a6-4bd7-a9c9-dbed70086212\") "
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.049095 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac326965-3277-44c2-bd27-773586999e23-catalog-content\") pod \"ac326965-3277-44c2-bd27-773586999e23\" (UID: \"ac326965-3277-44c2-bd27-773586999e23\") "
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.049147 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-krssl\" (UniqueName: \"kubernetes.io/projected/ac326965-3277-44c2-bd27-773586999e23-kube-api-access-krssl\") pod \"ac326965-3277-44c2-bd27-773586999e23\" (UID: \"ac326965-3277-44c2-bd27-773586999e23\") "
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.049193 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac326965-3277-44c2-bd27-773586999e23-utilities\") pod \"ac326965-3277-44c2-bd27-773586999e23\" (UID: \"ac326965-3277-44c2-bd27-773586999e23\") "
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.049474 4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad64884c-e97d-4dc2-8f86-a44c537f7068-utilities\") on node \"crc\" DevicePath \"\""
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.049491 4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24573bf5-c576-4e14-b3ac-f33e6ca99af3-utilities\") on node \"crc\" DevicePath \"\""
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.049501 4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad64884c-e97d-4dc2-8f86-a44c537f7068-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.049511 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-52sjt\" (UniqueName: \"kubernetes.io/projected/ad64884c-e97d-4dc2-8f86-a44c537f7068-kube-api-access-52sjt\") on node \"crc\" DevicePath \"\""
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.049519 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-spgvj\" (UniqueName: \"kubernetes.io/projected/24573bf5-c576-4e14-b3ac-f33e6ca99af3-kube-api-access-spgvj\") on node \"crc\" DevicePath \"\""
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.049543 4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24573bf5-c576-4e14-b3ac-f33e6ca99af3-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.050136 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a25ac582-d0a6-4bd7-a9c9-dbed70086212-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "a25ac582-d0a6-4bd7-a9c9-dbed70086212" (UID: "a25ac582-d0a6-4bd7-a9c9-dbed70086212"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.050292 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac326965-3277-44c2-bd27-773586999e23-utilities" (OuterVolumeSpecName: "utilities") pod "ac326965-3277-44c2-bd27-773586999e23" (UID: "ac326965-3277-44c2-bd27-773586999e23"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.069013 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c-utilities" (OuterVolumeSpecName: "utilities") pod "b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c" (UID: "b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.070316 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c" (UID: "b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.070863 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c-kube-api-access-64r9s" (OuterVolumeSpecName: "kube-api-access-64r9s") pod "b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c" (UID: "b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c"). InnerVolumeSpecName "kube-api-access-64r9s". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.070952 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac326965-3277-44c2-bd27-773586999e23-kube-api-access-krssl" (OuterVolumeSpecName: "kube-api-access-krssl") pod "ac326965-3277-44c2-bd27-773586999e23" (UID: "ac326965-3277-44c2-bd27-773586999e23"). InnerVolumeSpecName "kube-api-access-krssl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.071428 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a25ac582-d0a6-4bd7-a9c9-dbed70086212-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "a25ac582-d0a6-4bd7-a9c9-dbed70086212" (UID: "a25ac582-d0a6-4bd7-a9c9-dbed70086212"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.071452 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a25ac582-d0a6-4bd7-a9c9-dbed70086212-kube-api-access-cwlps" (OuterVolumeSpecName: "kube-api-access-cwlps") pod "a25ac582-d0a6-4bd7-a9c9-dbed70086212" (UID: "a25ac582-d0a6-4bd7-a9c9-dbed70086212"). InnerVolumeSpecName "kube-api-access-cwlps". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.071906 4651 scope.go:117] "RemoveContainer" containerID="32308b968e1b4ada23facceee4f34e59251a6ab99097a2c817b18b2f96d874c5"
Oct 11 04:55:58 crc kubenswrapper[4651]: E1011 04:55:58.072569 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"32308b968e1b4ada23facceee4f34e59251a6ab99097a2c817b18b2f96d874c5\": container with ID starting with 32308b968e1b4ada23facceee4f34e59251a6ab99097a2c817b18b2f96d874c5 not found: ID does not exist" containerID="32308b968e1b4ada23facceee4f34e59251a6ab99097a2c817b18b2f96d874c5"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.072606 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32308b968e1b4ada23facceee4f34e59251a6ab99097a2c817b18b2f96d874c5"} err="failed to get container status \"32308b968e1b4ada23facceee4f34e59251a6ab99097a2c817b18b2f96d874c5\": rpc error: code = NotFound desc = could not find container \"32308b968e1b4ada23facceee4f34e59251a6ab99097a2c817b18b2f96d874c5\": container with ID starting with 32308b968e1b4ada23facceee4f34e59251a6ab99097a2c817b18b2f96d874c5 not found: ID does not exist"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.072632 4651 scope.go:117] "RemoveContainer" containerID="bfbbddbac19ada18a351a881fb377aa4fb5fd7fdb8a6befc81f103fb1013585c"
Oct 11 04:55:58 crc kubenswrapper[4651]: E1011 04:55:58.073136 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bfbbddbac19ada18a351a881fb377aa4fb5fd7fdb8a6befc81f103fb1013585c\": container with ID starting with bfbbddbac19ada18a351a881fb377aa4fb5fd7fdb8a6befc81f103fb1013585c not found: ID does not exist" containerID="bfbbddbac19ada18a351a881fb377aa4fb5fd7fdb8a6befc81f103fb1013585c"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.073172 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfbbddbac19ada18a351a881fb377aa4fb5fd7fdb8a6befc81f103fb1013585c"} err="failed to get container status \"bfbbddbac19ada18a351a881fb377aa4fb5fd7fdb8a6befc81f103fb1013585c\": rpc error: code = NotFound desc = could not find container \"bfbbddbac19ada18a351a881fb377aa4fb5fd7fdb8a6befc81f103fb1013585c\": container with ID starting with bfbbddbac19ada18a351a881fb377aa4fb5fd7fdb8a6befc81f103fb1013585c not found: ID does not exist"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.073198 4651 scope.go:117] "RemoveContainer" containerID="8deb186874738aed7467d65fe7c04b36559ac552546fe6e7458ca26340b5f490"
Oct 11 04:55:58 crc kubenswrapper[4651]: E1011 04:55:58.073541 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8deb186874738aed7467d65fe7c04b36559ac552546fe6e7458ca26340b5f490\": container with ID starting with 8deb186874738aed7467d65fe7c04b36559ac552546fe6e7458ca26340b5f490 not found: ID does not exist" containerID="8deb186874738aed7467d65fe7c04b36559ac552546fe6e7458ca26340b5f490"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.073583 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8deb186874738aed7467d65fe7c04b36559ac552546fe6e7458ca26340b5f490"} err="failed to get container status \"8deb186874738aed7467d65fe7c04b36559ac552546fe6e7458ca26340b5f490\": rpc error: code = NotFound desc = could not find container \"8deb186874738aed7467d65fe7c04b36559ac552546fe6e7458ca26340b5f490\": container with ID starting with 8deb186874738aed7467d65fe7c04b36559ac552546fe6e7458ca26340b5f490 not found: ID does not exist"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.073614 4651 scope.go:117] "RemoveContainer" containerID="db3802d8ac52a3253e593ac9f76da7ab912261b2ef7dc4ef872f6072420b8c03"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.086711 4651 scope.go:117] "RemoveContainer" containerID="10e5c5761162b859828229031abd412e75c1f97938c3746f9f534c216115dbe3"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.101050 4651 scope.go:117] "RemoveContainer" containerID="f204696e045b631c78308169b24f5d53b75615ed904b895ac72aa66bd28fb71e"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.123955 4651 scope.go:117] "RemoveContainer" containerID="db3802d8ac52a3253e593ac9f76da7ab912261b2ef7dc4ef872f6072420b8c03"
Oct 11 04:55:58 crc kubenswrapper[4651]: E1011 04:55:58.124346 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db3802d8ac52a3253e593ac9f76da7ab912261b2ef7dc4ef872f6072420b8c03\": container with ID starting with db3802d8ac52a3253e593ac9f76da7ab912261b2ef7dc4ef872f6072420b8c03 not found: ID does not exist" containerID="db3802d8ac52a3253e593ac9f76da7ab912261b2ef7dc4ef872f6072420b8c03"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.124375 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db3802d8ac52a3253e593ac9f76da7ab912261b2ef7dc4ef872f6072420b8c03"} err="failed to get container status \"db3802d8ac52a3253e593ac9f76da7ab912261b2ef7dc4ef872f6072420b8c03\": rpc error: code = NotFound desc = could not find container \"db3802d8ac52a3253e593ac9f76da7ab912261b2ef7dc4ef872f6072420b8c03\": container with ID starting with db3802d8ac52a3253e593ac9f76da7ab912261b2ef7dc4ef872f6072420b8c03 not found: ID does not exist"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.124397 4651 scope.go:117] "RemoveContainer" containerID="10e5c5761162b859828229031abd412e75c1f97938c3746f9f534c216115dbe3"
Oct 11 04:55:58 crc kubenswrapper[4651]: E1011 04:55:58.124602 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10e5c5761162b859828229031abd412e75c1f97938c3746f9f534c216115dbe3\": container with ID starting with 10e5c5761162b859828229031abd412e75c1f97938c3746f9f534c216115dbe3 not found: ID does not exist" containerID="10e5c5761162b859828229031abd412e75c1f97938c3746f9f534c216115dbe3"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.124623 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10e5c5761162b859828229031abd412e75c1f97938c3746f9f534c216115dbe3"} err="failed to get container status \"10e5c5761162b859828229031abd412e75c1f97938c3746f9f534c216115dbe3\": rpc error: code = NotFound desc = could not find container \"10e5c5761162b859828229031abd412e75c1f97938c3746f9f534c216115dbe3\": container with ID starting with 10e5c5761162b859828229031abd412e75c1f97938c3746f9f534c216115dbe3 not found: ID does not exist"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.124636 4651 scope.go:117] "RemoveContainer" containerID="f204696e045b631c78308169b24f5d53b75615ed904b895ac72aa66bd28fb71e"
Oct 11 04:55:58 crc kubenswrapper[4651]: E1011 04:55:58.124917 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f204696e045b631c78308169b24f5d53b75615ed904b895ac72aa66bd28fb71e\": container with ID starting with f204696e045b631c78308169b24f5d53b75615ed904b895ac72aa66bd28fb71e not found: ID does not exist" containerID="f204696e045b631c78308169b24f5d53b75615ed904b895ac72aa66bd28fb71e"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.124942 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f204696e045b631c78308169b24f5d53b75615ed904b895ac72aa66bd28fb71e"} err="failed to get container status \"f204696e045b631c78308169b24f5d53b75615ed904b895ac72aa66bd28fb71e\": rpc error: code = NotFound desc = could not find container \"f204696e045b631c78308169b24f5d53b75615ed904b895ac72aa66bd28fb71e\": container with ID starting with f204696e045b631c78308169b24f5d53b75615ed904b895ac72aa66bd28fb71e not found: ID does not exist"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.124955 4651 scope.go:117] "RemoveContainer" containerID="7e88c131e0f34e2ca13b70b0058e9681ced1639f903478939165a46abc2ac4e6"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.137898 4651 scope.go:117] "RemoveContainer" containerID="e833e5a49b922dfe38820ce5d782ffb180be1111a0e6ba58c8968a1812652fd6"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.144835 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac326965-3277-44c2-bd27-773586999e23-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ac326965-3277-44c2-bd27-773586999e23" (UID: "ac326965-3277-44c2-bd27-773586999e23"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.149620 4651 scope.go:117] "RemoveContainer" containerID="bb7e3d9d5d320cd868b383aa48ffd3078155e046637ba5f3c5d5d16f284000a2"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.150171 4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac326965-3277-44c2-bd27-773586999e23-utilities\") on node \"crc\" DevicePath \"\""
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.150192 4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c-utilities\") on node \"crc\" DevicePath \"\""
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.150203 4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.150216 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cwlps\" (UniqueName: \"kubernetes.io/projected/a25ac582-d0a6-4bd7-a9c9-dbed70086212-kube-api-access-cwlps\") on node \"crc\" DevicePath \"\""
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.150228 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-64r9s\" (UniqueName: \"kubernetes.io/projected/b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c-kube-api-access-64r9s\") on node \"crc\" DevicePath \"\""
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.150241 4651 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a25ac582-d0a6-4bd7-a9c9-dbed70086212-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\""
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.150254 4651 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a25ac582-d0a6-4bd7-a9c9-dbed70086212-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\""
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.150264 4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac326965-3277-44c2-bd27-773586999e23-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.150277 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-krssl\" (UniqueName: \"kubernetes.io/projected/ac326965-3277-44c2-bd27-773586999e23-kube-api-access-krssl\") on node \"crc\" DevicePath \"\""
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.161632 4651 scope.go:117] "RemoveContainer" containerID="7e88c131e0f34e2ca13b70b0058e9681ced1639f903478939165a46abc2ac4e6"
Oct 11 04:55:58 crc kubenswrapper[4651]: E1011 04:55:58.162062 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e88c131e0f34e2ca13b70b0058e9681ced1639f903478939165a46abc2ac4e6\": container with ID starting with 7e88c131e0f34e2ca13b70b0058e9681ced1639f903478939165a46abc2ac4e6 not found: ID does not exist" containerID="7e88c131e0f34e2ca13b70b0058e9681ced1639f903478939165a46abc2ac4e6"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.162091 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e88c131e0f34e2ca13b70b0058e9681ced1639f903478939165a46abc2ac4e6"} err="failed to get container status \"7e88c131e0f34e2ca13b70b0058e9681ced1639f903478939165a46abc2ac4e6\": rpc error: code = NotFound desc = could not find container \"7e88c131e0f34e2ca13b70b0058e9681ced1639f903478939165a46abc2ac4e6\": container with ID starting with 7e88c131e0f34e2ca13b70b0058e9681ced1639f903478939165a46abc2ac4e6 not found: ID does not exist"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.162111 4651 scope.go:117] "RemoveContainer" containerID="e833e5a49b922dfe38820ce5d782ffb180be1111a0e6ba58c8968a1812652fd6"
Oct 11 04:55:58 crc kubenswrapper[4651]: E1011 04:55:58.162460 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e833e5a49b922dfe38820ce5d782ffb180be1111a0e6ba58c8968a1812652fd6\": container with ID starting with e833e5a49b922dfe38820ce5d782ffb180be1111a0e6ba58c8968a1812652fd6 not found: ID does not exist" containerID="e833e5a49b922dfe38820ce5d782ffb180be1111a0e6ba58c8968a1812652fd6"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.162481 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e833e5a49b922dfe38820ce5d782ffb180be1111a0e6ba58c8968a1812652fd6"} err="failed to get container status \"e833e5a49b922dfe38820ce5d782ffb180be1111a0e6ba58c8968a1812652fd6\": rpc error: code = NotFound desc = could not find container \"e833e5a49b922dfe38820ce5d782ffb180be1111a0e6ba58c8968a1812652fd6\": container with ID starting with e833e5a49b922dfe38820ce5d782ffb180be1111a0e6ba58c8968a1812652fd6 not found: ID does not exist"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.162494 4651 scope.go:117] "RemoveContainer" containerID="bb7e3d9d5d320cd868b383aa48ffd3078155e046637ba5f3c5d5d16f284000a2"
Oct 11 04:55:58 crc kubenswrapper[4651]: E1011 04:55:58.162697 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb7e3d9d5d320cd868b383aa48ffd3078155e046637ba5f3c5d5d16f284000a2\": container with ID starting with bb7e3d9d5d320cd868b383aa48ffd3078155e046637ba5f3c5d5d16f284000a2 not found: ID does not exist" containerID="bb7e3d9d5d320cd868b383aa48ffd3078155e046637ba5f3c5d5d16f284000a2"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.162716 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb7e3d9d5d320cd868b383aa48ffd3078155e046637ba5f3c5d5d16f284000a2"} err="failed to get container status \"bb7e3d9d5d320cd868b383aa48ffd3078155e046637ba5f3c5d5d16f284000a2\": rpc error: code = NotFound desc = could not find container \"bb7e3d9d5d320cd868b383aa48ffd3078155e046637ba5f3c5d5d16f284000a2\": container with ID starting with bb7e3d9d5d320cd868b383aa48ffd3078155e046637ba5f3c5d5d16f284000a2 not found: ID does not exist"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.162727 4651 scope.go:117] "RemoveContainer" containerID="1d45773b7318f593d0a25680d4464831ab23d910dd4adceaa6f67558de857d35"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.174558 4651 scope.go:117] "RemoveContainer" containerID="9e78bcadb7372b4d22b523f1b844f235bec688e5be55abfa8edf3ec2c42f62e4"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.200336 4651 scope.go:117] "RemoveContainer" containerID="5a8411747437478e0d9d62a2ae3f9e0eff4c20cf706b627e858a1b350da8b7ca"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.213166 4651 scope.go:117] "RemoveContainer" containerID="1d45773b7318f593d0a25680d4464831ab23d910dd4adceaa6f67558de857d35"
Oct 11 04:55:58 crc kubenswrapper[4651]: E1011 04:55:58.213529 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d45773b7318f593d0a25680d4464831ab23d910dd4adceaa6f67558de857d35\": container with ID starting with 1d45773b7318f593d0a25680d4464831ab23d910dd4adceaa6f67558de857d35 not found: ID does not exist" containerID="1d45773b7318f593d0a25680d4464831ab23d910dd4adceaa6f67558de857d35"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.213565 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d45773b7318f593d0a25680d4464831ab23d910dd4adceaa6f67558de857d35"} err="failed to get container status \"1d45773b7318f593d0a25680d4464831ab23d910dd4adceaa6f67558de857d35\": rpc error: code = NotFound desc = could not find container \"1d45773b7318f593d0a25680d4464831ab23d910dd4adceaa6f67558de857d35\": container with ID starting with 1d45773b7318f593d0a25680d4464831ab23d910dd4adceaa6f67558de857d35 not found: ID does not exist"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.213592 4651 scope.go:117] "RemoveContainer" containerID="9e78bcadb7372b4d22b523f1b844f235bec688e5be55abfa8edf3ec2c42f62e4"
Oct 11 04:55:58 crc kubenswrapper[4651]: E1011 04:55:58.214010 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e78bcadb7372b4d22b523f1b844f235bec688e5be55abfa8edf3ec2c42f62e4\": container with ID starting with 9e78bcadb7372b4d22b523f1b844f235bec688e5be55abfa8edf3ec2c42f62e4 not found: ID does not exist" containerID="9e78bcadb7372b4d22b523f1b844f235bec688e5be55abfa8edf3ec2c42f62e4"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.214050 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e78bcadb7372b4d22b523f1b844f235bec688e5be55abfa8edf3ec2c42f62e4"} err="failed to get container status \"9e78bcadb7372b4d22b523f1b844f235bec688e5be55abfa8edf3ec2c42f62e4\": rpc error: code = NotFound desc = could not find container \"9e78bcadb7372b4d22b523f1b844f235bec688e5be55abfa8edf3ec2c42f62e4\": container with ID starting with 9e78bcadb7372b4d22b523f1b844f235bec688e5be55abfa8edf3ec2c42f62e4 not found: ID does not exist"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.214077 4651 scope.go:117] "RemoveContainer" containerID="5a8411747437478e0d9d62a2ae3f9e0eff4c20cf706b627e858a1b350da8b7ca"
Oct 11 04:55:58 crc kubenswrapper[4651]: E1011 04:55:58.214303 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a8411747437478e0d9d62a2ae3f9e0eff4c20cf706b627e858a1b350da8b7ca\": container with ID starting with 5a8411747437478e0d9d62a2ae3f9e0eff4c20cf706b627e858a1b350da8b7ca not found: ID does not exist" containerID="5a8411747437478e0d9d62a2ae3f9e0eff4c20cf706b627e858a1b350da8b7ca"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.214326 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a8411747437478e0d9d62a2ae3f9e0eff4c20cf706b627e858a1b350da8b7ca"} err="failed to get container status \"5a8411747437478e0d9d62a2ae3f9e0eff4c20cf706b627e858a1b350da8b7ca\": rpc error: code = NotFound desc = could not find container \"5a8411747437478e0d9d62a2ae3f9e0eff4c20cf706b627e858a1b350da8b7ca\": container with ID starting with 5a8411747437478e0d9d62a2ae3f9e0eff4c20cf706b627e858a1b350da8b7ca not found: ID does not exist"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.214339 4651 scope.go:117] "RemoveContainer" containerID="1edf5f7f4b453b2ff313cb8ab6e5910898f8a8a011c5170d79f1f578e458516e"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.224714 4651 scope.go:117] "RemoveContainer" containerID="1edf5f7f4b453b2ff313cb8ab6e5910898f8a8a011c5170d79f1f578e458516e"
Oct 11 04:55:58 crc kubenswrapper[4651]: E1011 04:55:58.225145 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1edf5f7f4b453b2ff313cb8ab6e5910898f8a8a011c5170d79f1f578e458516e\": container with ID starting with 1edf5f7f4b453b2ff313cb8ab6e5910898f8a8a011c5170d79f1f578e458516e not found: ID does not exist" containerID="1edf5f7f4b453b2ff313cb8ab6e5910898f8a8a011c5170d79f1f578e458516e"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.225195 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1edf5f7f4b453b2ff313cb8ab6e5910898f8a8a011c5170d79f1f578e458516e"} err="failed to get container status \"1edf5f7f4b453b2ff313cb8ab6e5910898f8a8a011c5170d79f1f578e458516e\": rpc error: code = NotFound desc = could not find container \"1edf5f7f4b453b2ff313cb8ab6e5910898f8a8a011c5170d79f1f578e458516e\": container with ID starting with 1edf5f7f4b453b2ff313cb8ab6e5910898f8a8a011c5170d79f1f578e458516e not found: ID does not exist"
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.305504 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vtcqh"]
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.315200 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vtcqh"]
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.320109 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6gt5n"]
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.322662 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-6gt5n"]
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.325104 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-g9dtw"]
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.327491 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-g9dtw"]
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.335610 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-drmcl"]
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.338373 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-drmcl"]
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.344343 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-cd92z"]
Oct 11 04:55:58 crc kubenswrapper[4651]: I1011 04:55:58.346833 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-cd92z"]
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.006763 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-c7pll" event={"ID":"112a830a-ce46-4e30-8d29-10f0605944d9","Type":"ContainerStarted","Data":"fe313e7262a20ef0c77d9fba199fd49c76a03d8ec4c3475fedfe9e059dd58db5"}
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.006813 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-c7pll" event={"ID":"112a830a-ce46-4e30-8d29-10f0605944d9","Type":"ContainerStarted","Data":"46ceb91119e2b84dcb40d13e09247bfede97b4b71f15d8ff540f3c23d6f192b8"}
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.034114 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-c7pll" podStartSLOduration=2.034088456 podStartE2EDuration="2.034088456s" podCreationTimestamp="2025-10-11 04:55:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:55:59.031853681 +0000 UTC m=+279.928086527" watchObservedRunningTime="2025-10-11 04:55:59.034088456 +0000 UTC m=+279.930321282"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.526974 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rvmdl"]
Oct 11 04:55:59 crc kubenswrapper[4651]: E1011 04:55:59.527410 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac326965-3277-44c2-bd27-773586999e23" containerName="extract-content"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.527421 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac326965-3277-44c2-bd27-773586999e23" containerName="extract-content"
Oct 11 04:55:59 crc kubenswrapper[4651]: E1011 04:55:59.527428 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c" containerName="extract-utilities"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.527435 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c" containerName="extract-utilities"
Oct 11 04:55:59 crc kubenswrapper[4651]: E1011 04:55:59.527446 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac326965-3277-44c2-bd27-773586999e23" containerName="registry-server"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.527453 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac326965-3277-44c2-bd27-773586999e23" containerName="registry-server"
Oct 11 04:55:59 crc kubenswrapper[4651]: E1011 04:55:59.527463 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a25ac582-d0a6-4bd7-a9c9-dbed70086212" containerName="marketplace-operator"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.527470 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="a25ac582-d0a6-4bd7-a9c9-dbed70086212" containerName="marketplace-operator"
Oct 11 04:55:59 crc kubenswrapper[4651]: E1011 04:55:59.527483 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24573bf5-c576-4e14-b3ac-f33e6ca99af3" containerName="extract-utilities"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.527492 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="24573bf5-c576-4e14-b3ac-f33e6ca99af3" containerName="extract-utilities"
Oct 11 04:55:59 crc kubenswrapper[4651]: E1011 04:55:59.527510 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24573bf5-c576-4e14-b3ac-f33e6ca99af3" containerName="extract-content"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.527517 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="24573bf5-c576-4e14-b3ac-f33e6ca99af3" containerName="extract-content"
Oct 11 04:55:59 crc kubenswrapper[4651]: E1011 04:55:59.527529 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c" containerName="extract-content"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.527537 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c" containerName="extract-content"
Oct 11 04:55:59 crc kubenswrapper[4651]: E1011 04:55:59.527546 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad64884c-e97d-4dc2-8f86-a44c537f7068" containerName="registry-server"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.527553 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad64884c-e97d-4dc2-8f86-a44c537f7068" containerName="registry-server"
Oct 11 04:55:59 crc kubenswrapper[4651]: E1011 04:55:59.527563 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c" containerName="registry-server"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.527570 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c" containerName="registry-server"
Oct 11 04:55:59 crc kubenswrapper[4651]: E1011 04:55:59.527582 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad64884c-e97d-4dc2-8f86-a44c537f7068" containerName="extract-content"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.527588 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad64884c-e97d-4dc2-8f86-a44c537f7068" containerName="extract-content"
Oct 11 04:55:59 crc kubenswrapper[4651]: E1011 04:55:59.527597 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24573bf5-c576-4e14-b3ac-f33e6ca99af3" containerName="registry-server"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.527603 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="24573bf5-c576-4e14-b3ac-f33e6ca99af3" containerName="registry-server"
Oct 11 04:55:59 crc kubenswrapper[4651]: E1011 04:55:59.527610 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad64884c-e97d-4dc2-8f86-a44c537f7068" containerName="extract-utilities"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.527618 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad64884c-e97d-4dc2-8f86-a44c537f7068" containerName="extract-utilities"
Oct 11 04:55:59 crc kubenswrapper[4651]: E1011 04:55:59.527629 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac326965-3277-44c2-bd27-773586999e23" containerName="extract-utilities"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.527635 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac326965-3277-44c2-bd27-773586999e23" containerName="extract-utilities"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.527740 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="24573bf5-c576-4e14-b3ac-f33e6ca99af3" containerName="registry-server"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.527756 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c" containerName="registry-server"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.527767 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="a25ac582-d0a6-4bd7-a9c9-dbed70086212" containerName="marketplace-operator"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.527778 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad64884c-e97d-4dc2-8f86-a44c537f7068" containerName="registry-server"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.527789 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac326965-3277-44c2-bd27-773586999e23" containerName="registry-server"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.528909 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rvmdl"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.532381 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.538689 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rvmdl"]
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.668195 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0591b55e-2399-450c-9738-6160d1d25ee1-utilities\") pod \"redhat-marketplace-rvmdl\" (UID: \"0591b55e-2399-450c-9738-6160d1d25ee1\") " pod="openshift-marketplace/redhat-marketplace-rvmdl"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.668249 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0591b55e-2399-450c-9738-6160d1d25ee1-catalog-content\") pod \"redhat-marketplace-rvmdl\" (UID: \"0591b55e-2399-450c-9738-6160d1d25ee1\") " pod="openshift-marketplace/redhat-marketplace-rvmdl"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.668274 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mn2p9\" (UniqueName: \"kubernetes.io/projected/0591b55e-2399-450c-9738-6160d1d25ee1-kube-api-access-mn2p9\") pod \"redhat-marketplace-rvmdl\" (UID: \"0591b55e-2399-450c-9738-6160d1d25ee1\") " pod="openshift-marketplace/redhat-marketplace-rvmdl"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.733463 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-bmwjb"]
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.734955 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bmwjb"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.736795 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.742578 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bmwjb"]
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.770174 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0591b55e-2399-450c-9738-6160d1d25ee1-utilities\") pod \"redhat-marketplace-rvmdl\" (UID: \"0591b55e-2399-450c-9738-6160d1d25ee1\") " pod="openshift-marketplace/redhat-marketplace-rvmdl"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.770244 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0591b55e-2399-450c-9738-6160d1d25ee1-catalog-content\") pod \"redhat-marketplace-rvmdl\" (UID: \"0591b55e-2399-450c-9738-6160d1d25ee1\") " pod="openshift-marketplace/redhat-marketplace-rvmdl"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.770273 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mn2p9\" (UniqueName: \"kubernetes.io/projected/0591b55e-2399-450c-9738-6160d1d25ee1-kube-api-access-mn2p9\") pod \"redhat-marketplace-rvmdl\" (UID: \"0591b55e-2399-450c-9738-6160d1d25ee1\") " pod="openshift-marketplace/redhat-marketplace-rvmdl"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.770719 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0591b55e-2399-450c-9738-6160d1d25ee1-utilities\") pod \"redhat-marketplace-rvmdl\" (UID: \"0591b55e-2399-450c-9738-6160d1d25ee1\") " pod="openshift-marketplace/redhat-marketplace-rvmdl"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.770735 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0591b55e-2399-450c-9738-6160d1d25ee1-catalog-content\") pod \"redhat-marketplace-rvmdl\" (UID: \"0591b55e-2399-450c-9738-6160d1d25ee1\") " pod="openshift-marketplace/redhat-marketplace-rvmdl"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.786526 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mn2p9\" (UniqueName: \"kubernetes.io/projected/0591b55e-2399-450c-9738-6160d1d25ee1-kube-api-access-mn2p9\") pod \"redhat-marketplace-rvmdl\" (UID: \"0591b55e-2399-450c-9738-6160d1d25ee1\") " pod="openshift-marketplace/redhat-marketplace-rvmdl"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.855091 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rvmdl"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.871799 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vrlqw\" (UniqueName: \"kubernetes.io/projected/d813ef9d-786d-41a3-8170-90be0cf304bf-kube-api-access-vrlqw\") pod \"certified-operators-bmwjb\" (UID: \"d813ef9d-786d-41a3-8170-90be0cf304bf\") " pod="openshift-marketplace/certified-operators-bmwjb"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.871867 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d813ef9d-786d-41a3-8170-90be0cf304bf-utilities\") pod \"certified-operators-bmwjb\" (UID: \"d813ef9d-786d-41a3-8170-90be0cf304bf\") " pod="openshift-marketplace/certified-operators-bmwjb"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.871928 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d813ef9d-786d-41a3-8170-90be0cf304bf-catalog-content\") pod \"certified-operators-bmwjb\" (UID: \"d813ef9d-786d-41a3-8170-90be0cf304bf\") " pod="openshift-marketplace/certified-operators-bmwjb"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.875320 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="24573bf5-c576-4e14-b3ac-f33e6ca99af3" path="/var/lib/kubelet/pods/24573bf5-c576-4e14-b3ac-f33e6ca99af3/volumes"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.875947 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a25ac582-d0a6-4bd7-a9c9-dbed70086212" path="/var/lib/kubelet/pods/a25ac582-d0a6-4bd7-a9c9-dbed70086212/volumes"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.876438 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac326965-3277-44c2-bd27-773586999e23" path="/var/lib/kubelet/pods/ac326965-3277-44c2-bd27-773586999e23/volumes"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.877438 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad64884c-e97d-4dc2-8f86-a44c537f7068" path="/var/lib/kubelet/pods/ad64884c-e97d-4dc2-8f86-a44c537f7068/volumes"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.877978 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c" path="/var/lib/kubelet/pods/b23fe5ff-6a7c-4363-b1cd-25e74ba69e7c/volumes"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.973048 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vrlqw\" (UniqueName: \"kubernetes.io/projected/d813ef9d-786d-41a3-8170-90be0cf304bf-kube-api-access-vrlqw\") pod \"certified-operators-bmwjb\" (UID: \"d813ef9d-786d-41a3-8170-90be0cf304bf\") " pod="openshift-marketplace/certified-operators-bmwjb"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.973099 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d813ef9d-786d-41a3-8170-90be0cf304bf-utilities\") pod \"certified-operators-bmwjb\" (UID: \"d813ef9d-786d-41a3-8170-90be0cf304bf\") " pod="openshift-marketplace/certified-operators-bmwjb"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.973122 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d813ef9d-786d-41a3-8170-90be0cf304bf-catalog-content\") pod \"certified-operators-bmwjb\" (UID: \"d813ef9d-786d-41a3-8170-90be0cf304bf\") " pod="openshift-marketplace/certified-operators-bmwjb"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.974778 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d813ef9d-786d-41a3-8170-90be0cf304bf-catalog-content\") pod \"certified-operators-bmwjb\" (UID: \"d813ef9d-786d-41a3-8170-90be0cf304bf\") " pod="openshift-marketplace/certified-operators-bmwjb"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.975036 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d813ef9d-786d-41a3-8170-90be0cf304bf-utilities\") pod \"certified-operators-bmwjb\" (UID: \"d813ef9d-786d-41a3-8170-90be0cf304bf\") " pod="openshift-marketplace/certified-operators-bmwjb"
Oct 11 04:55:59 crc kubenswrapper[4651]: I1011 04:55:59.997374 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vrlqw\" (UniqueName: \"kubernetes.io/projected/d813ef9d-786d-41a3-8170-90be0cf304bf-kube-api-access-vrlqw\") pod \"certified-operators-bmwjb\" (UID: \"d813ef9d-786d-41a3-8170-90be0cf304bf\") " pod="openshift-marketplace/certified-operators-bmwjb"
Oct 11 04:56:00 crc kubenswrapper[4651]: I1011 04:56:00.017575 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-c7pll"
Oct 11 04:56:00 crc kubenswrapper[4651]: I1011 04:56:00.020547 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-c7pll"
Oct 11 04:56:00 crc kubenswrapper[4651]: I1011 04:56:00.066777 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bmwjb"
Oct 11 04:56:00 crc kubenswrapper[4651]: I1011 04:56:00.236991 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rvmdl"]
Oct 11 04:56:00 crc kubenswrapper[4651]: W1011 04:56:00.244612 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0591b55e_2399_450c_9738_6160d1d25ee1.slice/crio-131a6e0e9dc9210c840374bcb80d729d8080e85e90d3be3f1f96b15c80d00e0e WatchSource:0}: Error finding container 131a6e0e9dc9210c840374bcb80d729d8080e85e90d3be3f1f96b15c80d00e0e: Status 404 returned error can't find the container with id 131a6e0e9dc9210c840374bcb80d729d8080e85e90d3be3f1f96b15c80d00e0e
Oct 11 04:56:00 crc kubenswrapper[4651]: I1011 04:56:00.266553 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bmwjb"]
Oct 11 04:56:01 crc kubenswrapper[4651]: I1011 04:56:01.023096 4651 generic.go:334] "Generic (PLEG): container finished" podID="d813ef9d-786d-41a3-8170-90be0cf304bf" containerID="c858d4295ae4a9cec6a6004c8949cc9843c10b8677d89aac5ff8594347a23619" exitCode=0
Oct 11 04:56:01 crc kubenswrapper[4651]: I1011 04:56:01.023154 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bmwjb" event={"ID":"d813ef9d-786d-41a3-8170-90be0cf304bf","Type":"ContainerDied","Data":"c858d4295ae4a9cec6a6004c8949cc9843c10b8677d89aac5ff8594347a23619"}
Oct 11 04:56:01 crc kubenswrapper[4651]: I1011 04:56:01.023395 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bmwjb" event={"ID":"d813ef9d-786d-41a3-8170-90be0cf304bf","Type":"ContainerStarted","Data":"bb938cb13371eab3a3c5cce9f25eb5d6806da2852bd6dfc135fea987c997bb5b"}
Oct 11 04:56:01 crc kubenswrapper[4651]: I1011 04:56:01.025422 4651 generic.go:334] "Generic (PLEG): container finished" podID="0591b55e-2399-450c-9738-6160d1d25ee1" containerID="34f439245ff7a4f1289cfdcd2f40ff8f46be2150d35860311d9386b4cf42f44e" exitCode=0
Oct 11 04:56:01 crc kubenswrapper[4651]: I1011 04:56:01.025964 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rvmdl" event={"ID":"0591b55e-2399-450c-9738-6160d1d25ee1","Type":"ContainerDied","Data":"34f439245ff7a4f1289cfdcd2f40ff8f46be2150d35860311d9386b4cf42f44e"}
Oct 11 04:56:01 crc kubenswrapper[4651]: I1011 04:56:01.025987 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rvmdl" event={"ID":"0591b55e-2399-450c-9738-6160d1d25ee1","Type":"ContainerStarted","Data":"131a6e0e9dc9210c840374bcb80d729d8080e85e90d3be3f1f96b15c80d00e0e"}
Oct 11 04:56:01 crc kubenswrapper[4651]: I1011 04:56:01.925763 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-strx6"]
Oct 11 04:56:01 crc kubenswrapper[4651]: I1011 04:56:01.927109 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-strx6"
Oct 11 04:56:01 crc kubenswrapper[4651]: I1011 04:56:01.929579 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Oct 11 04:56:01 crc kubenswrapper[4651]: I1011 04:56:01.936852 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-strx6"]
Oct 11 04:56:02 crc kubenswrapper[4651]: I1011 04:56:02.031672 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rvmdl" event={"ID":"0591b55e-2399-450c-9738-6160d1d25ee1","Type":"ContainerStarted","Data":"06a11f6a25b40832897a53680c5ca8e3a13033835eb5a08d4a8828c02a03ee40"}
Oct 11 04:56:02 crc kubenswrapper[4651]: I1011 04:56:02.034461 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bmwjb" event={"ID":"d813ef9d-786d-41a3-8170-90be0cf304bf","Type":"ContainerStarted","Data":"4d354a9282ab5b0a1b1eb2689f3d46874a09247c3f90ad0236b82ac92216ff43"}
Oct 11 04:56:02 crc kubenswrapper[4651]: I1011 04:56:02.097673 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60df6e10-9dc8-478b-a424-a86b47a1ba0a-catalog-content\") pod \"redhat-operators-strx6\" (UID: \"60df6e10-9dc8-478b-a424-a86b47a1ba0a\") " pod="openshift-marketplace/redhat-operators-strx6"
Oct 11 04:56:02 crc kubenswrapper[4651]: I1011 04:56:02.097723 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60df6e10-9dc8-478b-a424-a86b47a1ba0a-utilities\") pod \"redhat-operators-strx6\" (UID: \"60df6e10-9dc8-478b-a424-a86b47a1ba0a\") " pod="openshift-marketplace/redhat-operators-strx6"
Oct 11 04:56:02 crc kubenswrapper[4651]: I1011 04:56:02.097758 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxjl6\" (UniqueName: \"kubernetes.io/projected/60df6e10-9dc8-478b-a424-a86b47a1ba0a-kube-api-access-hxjl6\") pod \"redhat-operators-strx6\" (UID: \"60df6e10-9dc8-478b-a424-a86b47a1ba0a\") " pod="openshift-marketplace/redhat-operators-strx6"
Oct 11 04:56:02 crc kubenswrapper[4651]: I1011 04:56:02.127330 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-5tgq7"]
Oct 11 04:56:02 crc kubenswrapper[4651]: I1011 04:56:02.129895 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5tgq7"
Oct 11 04:56:02 crc kubenswrapper[4651]: I1011 04:56:02.133023 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Oct 11 04:56:02 crc kubenswrapper[4651]: I1011 04:56:02.136012 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5tgq7"]
Oct 11 04:56:02 crc kubenswrapper[4651]: I1011 04:56:02.198633 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60df6e10-9dc8-478b-a424-a86b47a1ba0a-catalog-content\") pod \"redhat-operators-strx6\" (UID: \"60df6e10-9dc8-478b-a424-a86b47a1ba0a\") " pod="openshift-marketplace/redhat-operators-strx6"
Oct 11 04:56:02 crc kubenswrapper[4651]: I1011 04:56:02.198710 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60df6e10-9dc8-478b-a424-a86b47a1ba0a-utilities\") pod \"redhat-operators-strx6\" (UID: \"60df6e10-9dc8-478b-a424-a86b47a1ba0a\") " pod="openshift-marketplace/redhat-operators-strx6"
Oct 11 04:56:02 crc kubenswrapper[4651]: I1011 04:56:02.198769 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxjl6\" (UniqueName: \"kubernetes.io/projected/60df6e10-9dc8-478b-a424-a86b47a1ba0a-kube-api-access-hxjl6\") pod \"redhat-operators-strx6\" (UID: \"60df6e10-9dc8-478b-a424-a86b47a1ba0a\") " pod="openshift-marketplace/redhat-operators-strx6"
Oct 11 04:56:02 crc kubenswrapper[4651]: I1011 04:56:02.199195 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60df6e10-9dc8-478b-a424-a86b47a1ba0a-utilities\") pod \"redhat-operators-strx6\" (UID: \"60df6e10-9dc8-478b-a424-a86b47a1ba0a\") " pod="openshift-marketplace/redhat-operators-strx6"
Oct 11 04:56:02 crc kubenswrapper[4651]: I1011 04:56:02.199298 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60df6e10-9dc8-478b-a424-a86b47a1ba0a-catalog-content\") pod \"redhat-operators-strx6\" (UID: \"60df6e10-9dc8-478b-a424-a86b47a1ba0a\") " pod="openshift-marketplace/redhat-operators-strx6"
Oct 11 04:56:02 crc kubenswrapper[4651]: I1011 04:56:02.214965 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxjl6\" (UniqueName: \"kubernetes.io/projected/60df6e10-9dc8-478b-a424-a86b47a1ba0a-kube-api-access-hxjl6\") pod \"redhat-operators-strx6\" (UID: \"60df6e10-9dc8-478b-a424-a86b47a1ba0a\") " pod="openshift-marketplace/redhat-operators-strx6"
Oct 11 04:56:02 crc kubenswrapper[4651]: I1011 04:56:02.256610 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-strx6"
Oct 11 04:56:02 crc kubenswrapper[4651]: I1011 04:56:02.299655 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3f42662-a802-49b3-bdb2-ad9746e0f0e7-catalog-content\") pod \"community-operators-5tgq7\" (UID: \"a3f42662-a802-49b3-bdb2-ad9746e0f0e7\") " pod="openshift-marketplace/community-operators-5tgq7"
Oct 11 04:56:02 crc kubenswrapper[4651]: I1011 04:56:02.300286 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3f42662-a802-49b3-bdb2-ad9746e0f0e7-utilities\") pod \"community-operators-5tgq7\" (UID: \"a3f42662-a802-49b3-bdb2-ad9746e0f0e7\") " pod="openshift-marketplace/community-operators-5tgq7"
Oct 11 04:56:02 crc kubenswrapper[4651]: I1011 04:56:02.300436 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndp6g\" (UniqueName: \"kubernetes.io/projected/a3f42662-a802-49b3-bdb2-ad9746e0f0e7-kube-api-access-ndp6g\") pod \"community-operators-5tgq7\" (UID: \"a3f42662-a802-49b3-bdb2-ad9746e0f0e7\") " pod="openshift-marketplace/community-operators-5tgq7"
Oct 11 04:56:02 crc kubenswrapper[4651]: I1011 04:56:02.405256 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3f42662-a802-49b3-bdb2-ad9746e0f0e7-catalog-content\") pod \"community-operators-5tgq7\" (UID: \"a3f42662-a802-49b3-bdb2-ad9746e0f0e7\") " pod="openshift-marketplace/community-operators-5tgq7"
Oct 11 04:56:02 crc kubenswrapper[4651]: I1011 04:56:02.405367 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3f42662-a802-49b3-bdb2-ad9746e0f0e7-utilities\") pod \"community-operators-5tgq7\" (UID: \"a3f42662-a802-49b3-bdb2-ad9746e0f0e7\") " pod="openshift-marketplace/community-operators-5tgq7"
Oct 11 04:56:02 crc kubenswrapper[4651]: I1011 04:56:02.405417 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndp6g\" (UniqueName: \"kubernetes.io/projected/a3f42662-a802-49b3-bdb2-ad9746e0f0e7-kube-api-access-ndp6g\") pod \"community-operators-5tgq7\" (UID: \"a3f42662-a802-49b3-bdb2-ad9746e0f0e7\") " pod="openshift-marketplace/community-operators-5tgq7"
Oct 11 04:56:02 crc kubenswrapper[4651]: I1011 04:56:02.405782 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3f42662-a802-49b3-bdb2-ad9746e0f0e7-catalog-content\") pod \"community-operators-5tgq7\" (UID: \"a3f42662-a802-49b3-bdb2-ad9746e0f0e7\") " pod="openshift-marketplace/community-operators-5tgq7"
Oct 11 04:56:02 crc kubenswrapper[4651]: I1011 04:56:02.406574 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3f42662-a802-49b3-bdb2-ad9746e0f0e7-utilities\") pod \"community-operators-5tgq7\" (UID: \"a3f42662-a802-49b3-bdb2-ad9746e0f0e7\") " pod="openshift-marketplace/community-operators-5tgq7"
Oct 11 04:56:02 crc kubenswrapper[4651]: I1011 04:56:02.423343 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-strx6"]
Oct 11 04:56:02 crc kubenswrapper[4651]: I1011 04:56:02.429685 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndp6g\" (UniqueName: \"kubernetes.io/projected/a3f42662-a802-49b3-bdb2-ad9746e0f0e7-kube-api-access-ndp6g\") pod \"community-operators-5tgq7\" (UID: \"a3f42662-a802-49b3-bdb2-ad9746e0f0e7\") " pod="openshift-marketplace/community-operators-5tgq7"
Oct 11 04:56:02 crc kubenswrapper[4651]: I1011 04:56:02.454052 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5tgq7"
Oct 11 04:56:02 crc kubenswrapper[4651]: I1011 04:56:02.836308 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5tgq7"]
Oct 11 04:56:02 crc kubenswrapper[4651]: W1011 04:56:02.843020 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda3f42662_a802_49b3_bdb2_ad9746e0f0e7.slice/crio-6835b3b96f51d3e182f4fbca21cc06b8ac014e1db2531bc2367850cd548a3ec5 WatchSource:0}: Error finding container 6835b3b96f51d3e182f4fbca21cc06b8ac014e1db2531bc2367850cd548a3ec5: Status 404 returned error can't find the container with id 6835b3b96f51d3e182f4fbca21cc06b8ac014e1db2531bc2367850cd548a3ec5
Oct 11 04:56:03 crc kubenswrapper[4651]: I1011 04:56:03.040610 4651 generic.go:334] "Generic (PLEG): container finished" podID="60df6e10-9dc8-478b-a424-a86b47a1ba0a" containerID="209a9c38d2e3310cccce614c9ff74dedeb5f64cdbb94a1603549f9216ce60ba3" exitCode=0
Oct 11 04:56:03 crc kubenswrapper[4651]: I1011 04:56:03.040714 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-strx6" event={"ID":"60df6e10-9dc8-478b-a424-a86b47a1ba0a","Type":"ContainerDied","Data":"209a9c38d2e3310cccce614c9ff74dedeb5f64cdbb94a1603549f9216ce60ba3"}
Oct 11 04:56:03 crc kubenswrapper[4651]: I1011 04:56:03.040795 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-strx6" event={"ID":"60df6e10-9dc8-478b-a424-a86b47a1ba0a","Type":"ContainerStarted","Data":"f7f5d103e037434b2f6e75e95577ff37dfc767f7b3d347d0e4ca84ee6e22da9d"}
Oct 11 04:56:03 crc kubenswrapper[4651]: I1011 04:56:03.042492 4651 generic.go:334] "Generic (PLEG): container finished" podID="d813ef9d-786d-41a3-8170-90be0cf304bf" containerID="4d354a9282ab5b0a1b1eb2689f3d46874a09247c3f90ad0236b82ac92216ff43" exitCode=0
Oct 11 04:56:03 crc kubenswrapper[4651]: I1011 04:56:03.042619 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bmwjb" event={"ID":"d813ef9d-786d-41a3-8170-90be0cf304bf","Type":"ContainerDied","Data":"4d354a9282ab5b0a1b1eb2689f3d46874a09247c3f90ad0236b82ac92216ff43"}
Oct 11 04:56:03 crc kubenswrapper[4651]: I1011 04:56:03.048156 4651 generic.go:334] "Generic (PLEG): container finished" podID="0591b55e-2399-450c-9738-6160d1d25ee1" containerID="06a11f6a25b40832897a53680c5ca8e3a13033835eb5a08d4a8828c02a03ee40" exitCode=0
Oct 11 04:56:03 crc kubenswrapper[4651]: I1011 04:56:03.048489 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rvmdl" event={"ID":"0591b55e-2399-450c-9738-6160d1d25ee1","Type":"ContainerDied","Data":"06a11f6a25b40832897a53680c5ca8e3a13033835eb5a08d4a8828c02a03ee40"}
Oct 11 04:56:03 crc kubenswrapper[4651]: I1011 04:56:03.052546 4651 generic.go:334] "Generic (PLEG): container finished" podID="a3f42662-a802-49b3-bdb2-ad9746e0f0e7" containerID="a635ffd469e772c270979f38d96c66c7c5eade5afda1a16a2439106046712438" exitCode=0
Oct 11 04:56:03 crc kubenswrapper[4651]:
I1011 04:56:03.052570 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5tgq7" event={"ID":"a3f42662-a802-49b3-bdb2-ad9746e0f0e7","Type":"ContainerDied","Data":"a635ffd469e772c270979f38d96c66c7c5eade5afda1a16a2439106046712438"} Oct 11 04:56:03 crc kubenswrapper[4651]: I1011 04:56:03.052589 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5tgq7" event={"ID":"a3f42662-a802-49b3-bdb2-ad9746e0f0e7","Type":"ContainerStarted","Data":"6835b3b96f51d3e182f4fbca21cc06b8ac014e1db2531bc2367850cd548a3ec5"} Oct 11 04:56:04 crc kubenswrapper[4651]: I1011 04:56:04.059778 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5tgq7" event={"ID":"a3f42662-a802-49b3-bdb2-ad9746e0f0e7","Type":"ContainerStarted","Data":"fe727a964bdfa867705d5eb9aef73f5e65486805a0b20502c009d048a61f9578"} Oct 11 04:56:04 crc kubenswrapper[4651]: I1011 04:56:04.061538 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-strx6" event={"ID":"60df6e10-9dc8-478b-a424-a86b47a1ba0a","Type":"ContainerStarted","Data":"3e98d2363dca35614b6df32ad71bc92f7ee9c221851e9b7d0511d395f2471b91"} Oct 11 04:56:04 crc kubenswrapper[4651]: I1011 04:56:04.065006 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bmwjb" event={"ID":"d813ef9d-786d-41a3-8170-90be0cf304bf","Type":"ContainerStarted","Data":"24fa5a3da3046054a2ce10fdcdb642d5eff33fb835c9e4d602dd55996d6dc4ca"} Oct 11 04:56:04 crc kubenswrapper[4651]: I1011 04:56:04.066553 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rvmdl" event={"ID":"0591b55e-2399-450c-9738-6160d1d25ee1","Type":"ContainerStarted","Data":"c6d2de4204b6f938c3c5bc0e363192eccec8ae30a7f37ffd56dcba097376e93b"} Oct 11 04:56:04 crc kubenswrapper[4651]: I1011 04:56:04.114131 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-bmwjb" podStartSLOduration=2.494724864 podStartE2EDuration="5.114110587s" podCreationTimestamp="2025-10-11 04:55:59 +0000 UTC" firstStartedPulling="2025-10-11 04:56:01.024666627 +0000 UTC m=+281.920899423" lastFinishedPulling="2025-10-11 04:56:03.64405232 +0000 UTC m=+284.540285146" observedRunningTime="2025-10-11 04:56:04.11299004 +0000 UTC m=+285.009222836" watchObservedRunningTime="2025-10-11 04:56:04.114110587 +0000 UTC m=+285.010343383" Oct 11 04:56:04 crc kubenswrapper[4651]: I1011 04:56:04.131262 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-rvmdl" podStartSLOduration=2.652199316 podStartE2EDuration="5.131248727s" podCreationTimestamp="2025-10-11 04:55:59 +0000 UTC" firstStartedPulling="2025-10-11 04:56:01.026892231 +0000 UTC m=+281.923125027" lastFinishedPulling="2025-10-11 04:56:03.505941602 +0000 UTC m=+284.402174438" observedRunningTime="2025-10-11 04:56:04.127228598 +0000 UTC m=+285.023461404" watchObservedRunningTime="2025-10-11 04:56:04.131248727 +0000 UTC m=+285.027481523" Oct 11 04:56:05 crc kubenswrapper[4651]: I1011 04:56:05.073730 4651 generic.go:334] "Generic (PLEG): container finished" podID="60df6e10-9dc8-478b-a424-a86b47a1ba0a" containerID="3e98d2363dca35614b6df32ad71bc92f7ee9c221851e9b7d0511d395f2471b91" exitCode=0 Oct 11 04:56:05 crc kubenswrapper[4651]: I1011 04:56:05.073946 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-strx6" event={"ID":"60df6e10-9dc8-478b-a424-a86b47a1ba0a","Type":"ContainerDied","Data":"3e98d2363dca35614b6df32ad71bc92f7ee9c221851e9b7d0511d395f2471b91"} Oct 11 04:56:05 crc kubenswrapper[4651]: I1011 04:56:05.076874 4651 generic.go:334] "Generic (PLEG): container finished" podID="a3f42662-a802-49b3-bdb2-ad9746e0f0e7" containerID="fe727a964bdfa867705d5eb9aef73f5e65486805a0b20502c009d048a61f9578" exitCode=0 Oct 11 04:56:05 crc kubenswrapper[4651]: I1011 04:56:05.076961 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5tgq7" event={"ID":"a3f42662-a802-49b3-bdb2-ad9746e0f0e7","Type":"ContainerDied","Data":"fe727a964bdfa867705d5eb9aef73f5e65486805a0b20502c009d048a61f9578"} Oct 11 04:56:06 crc kubenswrapper[4651]: I1011 04:56:06.082878 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-strx6" event={"ID":"60df6e10-9dc8-478b-a424-a86b47a1ba0a","Type":"ContainerStarted","Data":"137ea7a218109fcc22b314b49755deb80646c5f471843f985489f3cc1b527582"} Oct 11 04:56:06 crc kubenswrapper[4651]: I1011 04:56:06.084871 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5tgq7" event={"ID":"a3f42662-a802-49b3-bdb2-ad9746e0f0e7","Type":"ContainerStarted","Data":"6ab7e7fae09b706cf3825cf2d5f1396ca7c39d0f82a91bf2632709ec7452f6a9"} Oct 11 04:56:06 crc kubenswrapper[4651]: I1011 04:56:06.106854 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-strx6" podStartSLOduration=2.691672755 podStartE2EDuration="5.106837082s" podCreationTimestamp="2025-10-11 04:56:01 +0000 UTC" firstStartedPulling="2025-10-11 04:56:03.042430004 +0000 UTC m=+283.938662800" lastFinishedPulling="2025-10-11 04:56:05.457594331 +0000 UTC m=+286.353827127" observedRunningTime="2025-10-11 04:56:06.104116215 +0000 UTC m=+287.000349031" watchObservedRunningTime="2025-10-11 04:56:06.106837082 +0000 UTC m=+287.003069888" Oct 11 04:56:09 crc kubenswrapper[4651]: I1011 04:56:09.856086 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-rvmdl" Oct 11 04:56:09 crc kubenswrapper[4651]: I1011 04:56:09.856432 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rvmdl" Oct 11 04:56:09 crc kubenswrapper[4651]: I1011 04:56:09.903525 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-rvmdl" Oct 11 04:56:09 crc kubenswrapper[4651]: I1011 04:56:09.918359 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-5tgq7" podStartSLOduration=5.287099123 podStartE2EDuration="7.918341075s" podCreationTimestamp="2025-10-11 04:56:02 +0000 UTC" firstStartedPulling="2025-10-11 04:56:03.05453558 +0000 UTC m=+283.950768376" lastFinishedPulling="2025-10-11 04:56:05.685777532 +0000 UTC m=+286.582010328" observedRunningTime="2025-10-11 04:56:06.137175974 +0000 UTC m=+287.033408770" watchObservedRunningTime="2025-10-11 04:56:09.918341075 +0000 UTC m=+290.814573881" Oct 11 04:56:10 crc kubenswrapper[4651]: I1011 04:56:10.066907 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-bmwjb" Oct 11 04:56:10 crc kubenswrapper[4651]: I1011 04:56:10.067256 4651 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-marketplace/certified-operators-bmwjb" Oct 11 04:56:10 crc kubenswrapper[4651]: I1011 04:56:10.107217 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-bmwjb" Oct 11 04:56:10 crc kubenswrapper[4651]: I1011 04:56:10.152473 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rvmdl" Oct 11 04:56:10 crc kubenswrapper[4651]: I1011 04:56:10.161483 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-bmwjb" Oct 11 04:56:12 crc kubenswrapper[4651]: I1011 04:56:12.257213 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-strx6" Oct 11 04:56:12 crc kubenswrapper[4651]: I1011 04:56:12.257506 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-strx6" Oct 11 04:56:12 crc kubenswrapper[4651]: I1011 04:56:12.292347 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-strx6" Oct 11 04:56:12 crc kubenswrapper[4651]: I1011 04:56:12.455106 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-5tgq7" Oct 11 04:56:12 crc kubenswrapper[4651]: I1011 04:56:12.455386 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-5tgq7" Oct 11 04:56:12 crc kubenswrapper[4651]: I1011 04:56:12.487385 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-5tgq7" Oct 11 04:56:13 crc kubenswrapper[4651]: I1011 04:56:13.171374 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-5tgq7" Oct 11 04:56:13 crc kubenswrapper[4651]: I1011 04:56:13.173429 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-strx6" Oct 11 04:57:16 crc kubenswrapper[4651]: I1011 04:57:16.310238 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 04:57:16 crc kubenswrapper[4651]: I1011 04:57:16.310798 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 04:57:46 crc kubenswrapper[4651]: I1011 04:57:46.310241 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 04:57:46 crc kubenswrapper[4651]: I1011 04:57:46.310896 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" 
probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 04:58:03 crc kubenswrapper[4651]: I1011 04:58:03.575574 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-hqzpc"] Oct 11 04:58:03 crc kubenswrapper[4651]: I1011 04:58:03.576638 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-hqzpc" Oct 11 04:58:03 crc kubenswrapper[4651]: I1011 04:58:03.590339 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-hqzpc"] Oct 11 04:58:03 crc kubenswrapper[4651]: I1011 04:58:03.747748 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2a0ceeb0-0006-4108-8c65-12828f07ab5a-bound-sa-token\") pod \"image-registry-66df7c8f76-hqzpc\" (UID: \"2a0ceeb0-0006-4108-8c65-12828f07ab5a\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqzpc" Oct 11 04:58:03 crc kubenswrapper[4651]: I1011 04:58:03.748044 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2a0ceeb0-0006-4108-8c65-12828f07ab5a-trusted-ca\") pod \"image-registry-66df7c8f76-hqzpc\" (UID: \"2a0ceeb0-0006-4108-8c65-12828f07ab5a\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqzpc" Oct 11 04:58:03 crc kubenswrapper[4651]: I1011 04:58:03.748072 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2a0ceeb0-0006-4108-8c65-12828f07ab5a-registry-tls\") pod \"image-registry-66df7c8f76-hqzpc\" (UID: \"2a0ceeb0-0006-4108-8c65-12828f07ab5a\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqzpc" Oct 11 04:58:03 crc kubenswrapper[4651]: I1011 04:58:03.748101 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2a0ceeb0-0006-4108-8c65-12828f07ab5a-ca-trust-extracted\") pod \"image-registry-66df7c8f76-hqzpc\" (UID: \"2a0ceeb0-0006-4108-8c65-12828f07ab5a\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqzpc" Oct 11 04:58:03 crc kubenswrapper[4651]: I1011 04:58:03.748121 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p7wbx\" (UniqueName: \"kubernetes.io/projected/2a0ceeb0-0006-4108-8c65-12828f07ab5a-kube-api-access-p7wbx\") pod \"image-registry-66df7c8f76-hqzpc\" (UID: \"2a0ceeb0-0006-4108-8c65-12828f07ab5a\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqzpc" Oct 11 04:58:03 crc kubenswrapper[4651]: I1011 04:58:03.748227 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2a0ceeb0-0006-4108-8c65-12828f07ab5a-registry-certificates\") pod \"image-registry-66df7c8f76-hqzpc\" (UID: \"2a0ceeb0-0006-4108-8c65-12828f07ab5a\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqzpc" Oct 11 04:58:03 crc kubenswrapper[4651]: I1011 04:58:03.748276 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: 
\"kubernetes.io/secret/2a0ceeb0-0006-4108-8c65-12828f07ab5a-installation-pull-secrets\") pod \"image-registry-66df7c8f76-hqzpc\" (UID: \"2a0ceeb0-0006-4108-8c65-12828f07ab5a\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqzpc" Oct 11 04:58:03 crc kubenswrapper[4651]: I1011 04:58:03.748309 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-hqzpc\" (UID: \"2a0ceeb0-0006-4108-8c65-12828f07ab5a\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqzpc" Oct 11 04:58:03 crc kubenswrapper[4651]: I1011 04:58:03.774287 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-hqzpc\" (UID: \"2a0ceeb0-0006-4108-8c65-12828f07ab5a\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqzpc" Oct 11 04:58:03 crc kubenswrapper[4651]: I1011 04:58:03.849411 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2a0ceeb0-0006-4108-8c65-12828f07ab5a-trusted-ca\") pod \"image-registry-66df7c8f76-hqzpc\" (UID: \"2a0ceeb0-0006-4108-8c65-12828f07ab5a\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqzpc" Oct 11 04:58:03 crc kubenswrapper[4651]: I1011 04:58:03.849478 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2a0ceeb0-0006-4108-8c65-12828f07ab5a-registry-tls\") pod \"image-registry-66df7c8f76-hqzpc\" (UID: \"2a0ceeb0-0006-4108-8c65-12828f07ab5a\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqzpc" Oct 11 04:58:03 crc kubenswrapper[4651]: I1011 04:58:03.849502 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2a0ceeb0-0006-4108-8c65-12828f07ab5a-ca-trust-extracted\") pod \"image-registry-66df7c8f76-hqzpc\" (UID: \"2a0ceeb0-0006-4108-8c65-12828f07ab5a\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqzpc" Oct 11 04:58:03 crc kubenswrapper[4651]: I1011 04:58:03.849520 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p7wbx\" (UniqueName: \"kubernetes.io/projected/2a0ceeb0-0006-4108-8c65-12828f07ab5a-kube-api-access-p7wbx\") pod \"image-registry-66df7c8f76-hqzpc\" (UID: \"2a0ceeb0-0006-4108-8c65-12828f07ab5a\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqzpc" Oct 11 04:58:03 crc kubenswrapper[4651]: I1011 04:58:03.849549 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2a0ceeb0-0006-4108-8c65-12828f07ab5a-registry-certificates\") pod \"image-registry-66df7c8f76-hqzpc\" (UID: \"2a0ceeb0-0006-4108-8c65-12828f07ab5a\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqzpc" Oct 11 04:58:03 crc kubenswrapper[4651]: I1011 04:58:03.849567 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2a0ceeb0-0006-4108-8c65-12828f07ab5a-installation-pull-secrets\") pod \"image-registry-66df7c8f76-hqzpc\" (UID: 
\"2a0ceeb0-0006-4108-8c65-12828f07ab5a\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqzpc" Oct 11 04:58:03 crc kubenswrapper[4651]: I1011 04:58:03.849992 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2a0ceeb0-0006-4108-8c65-12828f07ab5a-bound-sa-token\") pod \"image-registry-66df7c8f76-hqzpc\" (UID: \"2a0ceeb0-0006-4108-8c65-12828f07ab5a\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqzpc" Oct 11 04:58:03 crc kubenswrapper[4651]: I1011 04:58:03.850064 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2a0ceeb0-0006-4108-8c65-12828f07ab5a-ca-trust-extracted\") pod \"image-registry-66df7c8f76-hqzpc\" (UID: \"2a0ceeb0-0006-4108-8c65-12828f07ab5a\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqzpc" Oct 11 04:58:03 crc kubenswrapper[4651]: I1011 04:58:03.851680 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2a0ceeb0-0006-4108-8c65-12828f07ab5a-trusted-ca\") pod \"image-registry-66df7c8f76-hqzpc\" (UID: \"2a0ceeb0-0006-4108-8c65-12828f07ab5a\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqzpc" Oct 11 04:58:03 crc kubenswrapper[4651]: I1011 04:58:03.851932 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2a0ceeb0-0006-4108-8c65-12828f07ab5a-registry-certificates\") pod \"image-registry-66df7c8f76-hqzpc\" (UID: \"2a0ceeb0-0006-4108-8c65-12828f07ab5a\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqzpc" Oct 11 04:58:03 crc kubenswrapper[4651]: I1011 04:58:03.860210 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2a0ceeb0-0006-4108-8c65-12828f07ab5a-installation-pull-secrets\") pod \"image-registry-66df7c8f76-hqzpc\" (UID: \"2a0ceeb0-0006-4108-8c65-12828f07ab5a\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqzpc" Oct 11 04:58:03 crc kubenswrapper[4651]: I1011 04:58:03.860521 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2a0ceeb0-0006-4108-8c65-12828f07ab5a-registry-tls\") pod \"image-registry-66df7c8f76-hqzpc\" (UID: \"2a0ceeb0-0006-4108-8c65-12828f07ab5a\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqzpc" Oct 11 04:58:03 crc kubenswrapper[4651]: I1011 04:58:03.866878 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p7wbx\" (UniqueName: \"kubernetes.io/projected/2a0ceeb0-0006-4108-8c65-12828f07ab5a-kube-api-access-p7wbx\") pod \"image-registry-66df7c8f76-hqzpc\" (UID: \"2a0ceeb0-0006-4108-8c65-12828f07ab5a\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqzpc" Oct 11 04:58:03 crc kubenswrapper[4651]: I1011 04:58:03.868752 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2a0ceeb0-0006-4108-8c65-12828f07ab5a-bound-sa-token\") pod \"image-registry-66df7c8f76-hqzpc\" (UID: \"2a0ceeb0-0006-4108-8c65-12828f07ab5a\") " pod="openshift-image-registry/image-registry-66df7c8f76-hqzpc" Oct 11 04:58:03 crc kubenswrapper[4651]: I1011 04:58:03.895803 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-hqzpc" Oct 11 04:58:04 crc kubenswrapper[4651]: I1011 04:58:04.342425 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-hqzpc"] Oct 11 04:58:04 crc kubenswrapper[4651]: I1011 04:58:04.800111 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-hqzpc" event={"ID":"2a0ceeb0-0006-4108-8c65-12828f07ab5a","Type":"ContainerStarted","Data":"bfcf8f47ab58eb215d04ae324f782f9946bab28ef7ddb2b9faaef36ab78314fa"} Oct 11 04:58:04 crc kubenswrapper[4651]: I1011 04:58:04.800175 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-hqzpc" event={"ID":"2a0ceeb0-0006-4108-8c65-12828f07ab5a","Type":"ContainerStarted","Data":"8c49427929cb71420236150d60e3de61bdc351a92e48f099362ebfc3ef93530e"} Oct 11 04:58:04 crc kubenswrapper[4651]: I1011 04:58:04.800359 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-hqzpc" Oct 11 04:58:04 crc kubenswrapper[4651]: I1011 04:58:04.820799 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-hqzpc" podStartSLOduration=1.8207803120000001 podStartE2EDuration="1.820780312s" podCreationTimestamp="2025-10-11 04:58:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:58:04.819037829 +0000 UTC m=+405.715270645" watchObservedRunningTime="2025-10-11 04:58:04.820780312 +0000 UTC m=+405.717013118" Oct 11 04:58:16 crc kubenswrapper[4651]: I1011 04:58:16.311083 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 04:58:16 crc kubenswrapper[4651]: I1011 04:58:16.312278 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 04:58:16 crc kubenswrapper[4651]: I1011 04:58:16.312393 4651 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" Oct 11 04:58:16 crc kubenswrapper[4651]: I1011 04:58:16.313548 4651 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5ba3105ff1f646ac3c317b8777235fea6078905260c012507f606af9534c8bd2"} pod="openshift-machine-config-operator/machine-config-daemon-78jnv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 11 04:58:16 crc kubenswrapper[4651]: I1011 04:58:16.313658 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" containerID="cri-o://5ba3105ff1f646ac3c317b8777235fea6078905260c012507f606af9534c8bd2" gracePeriod=600 Oct 11 04:58:16 crc kubenswrapper[4651]: I1011 
04:58:16.884374 4651 generic.go:334] "Generic (PLEG): container finished" podID="519a1ae1-e964-48b0-8b61-835146df28c1" containerID="5ba3105ff1f646ac3c317b8777235fea6078905260c012507f606af9534c8bd2" exitCode=0 Oct 11 04:58:16 crc kubenswrapper[4651]: I1011 04:58:16.884491 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" event={"ID":"519a1ae1-e964-48b0-8b61-835146df28c1","Type":"ContainerDied","Data":"5ba3105ff1f646ac3c317b8777235fea6078905260c012507f606af9534c8bd2"} Oct 11 04:58:16 crc kubenswrapper[4651]: I1011 04:58:16.884779 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" event={"ID":"519a1ae1-e964-48b0-8b61-835146df28c1","Type":"ContainerStarted","Data":"4c4da425137942b26402e87d800a133130761ddfde8b5ad1911ac803f8d0758d"} Oct 11 04:58:16 crc kubenswrapper[4651]: I1011 04:58:16.884851 4651 scope.go:117] "RemoveContainer" containerID="583486a7d6efd8d25aaeac78ea92f84834730c641516b6ade77aeda2baef58e9" Oct 11 04:58:23 crc kubenswrapper[4651]: I1011 04:58:23.904457 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-hqzpc" Oct 11 04:58:23 crc kubenswrapper[4651]: I1011 04:58:23.987562 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-v5ktt"] Oct 11 04:58:49 crc kubenswrapper[4651]: I1011 04:58:49.041139 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" podUID="4adc8e3f-786a-4d1b-985b-3f39cf67767a" containerName="registry" containerID="cri-o://1a9e22811661cdbe2e5722a41440d79a8b4bc220ab96da3b1dcba7c13cb56bbb" gracePeriod=30 Oct 11 04:58:49 crc kubenswrapper[4651]: I1011 04:58:49.373677 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:58:49 crc kubenswrapper[4651]: I1011 04:58:49.522333 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4adc8e3f-786a-4d1b-985b-3f39cf67767a-ca-trust-extracted\") pod \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " Oct 11 04:58:49 crc kubenswrapper[4651]: I1011 04:58:49.522412 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4adc8e3f-786a-4d1b-985b-3f39cf67767a-installation-pull-secrets\") pod \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " Oct 11 04:58:49 crc kubenswrapper[4651]: I1011 04:58:49.522434 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fmksb\" (UniqueName: \"kubernetes.io/projected/4adc8e3f-786a-4d1b-985b-3f39cf67767a-kube-api-access-fmksb\") pod \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " Oct 11 04:58:49 crc kubenswrapper[4651]: I1011 04:58:49.522460 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4adc8e3f-786a-4d1b-985b-3f39cf67767a-trusted-ca\") pod \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " Oct 11 04:58:49 crc kubenswrapper[4651]: I1011 04:58:49.522605 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " Oct 11 04:58:49 crc kubenswrapper[4651]: I1011 04:58:49.522635 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4adc8e3f-786a-4d1b-985b-3f39cf67767a-registry-certificates\") pod \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " Oct 11 04:58:49 crc kubenswrapper[4651]: I1011 04:58:49.522653 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4adc8e3f-786a-4d1b-985b-3f39cf67767a-bound-sa-token\") pod \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " Oct 11 04:58:49 crc kubenswrapper[4651]: I1011 04:58:49.522679 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4adc8e3f-786a-4d1b-985b-3f39cf67767a-registry-tls\") pod \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\" (UID: \"4adc8e3f-786a-4d1b-985b-3f39cf67767a\") " Oct 11 04:58:49 crc kubenswrapper[4651]: I1011 04:58:49.523572 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4adc8e3f-786a-4d1b-985b-3f39cf67767a-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "4adc8e3f-786a-4d1b-985b-3f39cf67767a" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:58:49 crc kubenswrapper[4651]: I1011 04:58:49.524218 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4adc8e3f-786a-4d1b-985b-3f39cf67767a-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "4adc8e3f-786a-4d1b-985b-3f39cf67767a" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:58:49 crc kubenswrapper[4651]: I1011 04:58:49.528753 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4adc8e3f-786a-4d1b-985b-3f39cf67767a-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "4adc8e3f-786a-4d1b-985b-3f39cf67767a" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:58:49 crc kubenswrapper[4651]: I1011 04:58:49.528808 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4adc8e3f-786a-4d1b-985b-3f39cf67767a-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "4adc8e3f-786a-4d1b-985b-3f39cf67767a" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:58:49 crc kubenswrapper[4651]: I1011 04:58:49.529051 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4adc8e3f-786a-4d1b-985b-3f39cf67767a-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "4adc8e3f-786a-4d1b-985b-3f39cf67767a" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:58:49 crc kubenswrapper[4651]: I1011 04:58:49.529565 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4adc8e3f-786a-4d1b-985b-3f39cf67767a-kube-api-access-fmksb" (OuterVolumeSpecName: "kube-api-access-fmksb") pod "4adc8e3f-786a-4d1b-985b-3f39cf67767a" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a"). InnerVolumeSpecName "kube-api-access-fmksb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:58:49 crc kubenswrapper[4651]: I1011 04:58:49.532391 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "4adc8e3f-786a-4d1b-985b-3f39cf67767a" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 11 04:58:49 crc kubenswrapper[4651]: I1011 04:58:49.547867 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4adc8e3f-786a-4d1b-985b-3f39cf67767a-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "4adc8e3f-786a-4d1b-985b-3f39cf67767a" (UID: "4adc8e3f-786a-4d1b-985b-3f39cf67767a"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 04:58:49 crc kubenswrapper[4651]: I1011 04:58:49.624135 4651 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4adc8e3f-786a-4d1b-985b-3f39cf67767a-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 11 04:58:49 crc kubenswrapper[4651]: I1011 04:58:49.624472 4651 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4adc8e3f-786a-4d1b-985b-3f39cf67767a-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 11 04:58:49 crc kubenswrapper[4651]: I1011 04:58:49.624485 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fmksb\" (UniqueName: \"kubernetes.io/projected/4adc8e3f-786a-4d1b-985b-3f39cf67767a-kube-api-access-fmksb\") on node \"crc\" DevicePath \"\"" Oct 11 04:58:49 crc kubenswrapper[4651]: I1011 04:58:49.624495 4651 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4adc8e3f-786a-4d1b-985b-3f39cf67767a-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 11 04:58:49 crc kubenswrapper[4651]: I1011 04:58:49.624503 4651 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4adc8e3f-786a-4d1b-985b-3f39cf67767a-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 11 04:58:49 crc kubenswrapper[4651]: I1011 04:58:49.624512 4651 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4adc8e3f-786a-4d1b-985b-3f39cf67767a-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 11 04:58:49 crc kubenswrapper[4651]: I1011 04:58:49.624521 4651 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4adc8e3f-786a-4d1b-985b-3f39cf67767a-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 11 04:58:50 crc kubenswrapper[4651]: I1011 04:58:50.112852 4651 generic.go:334] "Generic (PLEG): container finished" podID="4adc8e3f-786a-4d1b-985b-3f39cf67767a" containerID="1a9e22811661cdbe2e5722a41440d79a8b4bc220ab96da3b1dcba7c13cb56bbb" exitCode=0 Oct 11 04:58:50 crc kubenswrapper[4651]: I1011 04:58:50.112913 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" event={"ID":"4adc8e3f-786a-4d1b-985b-3f39cf67767a","Type":"ContainerDied","Data":"1a9e22811661cdbe2e5722a41440d79a8b4bc220ab96da3b1dcba7c13cb56bbb"} Oct 11 04:58:50 crc kubenswrapper[4651]: I1011 04:58:50.112950 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" event={"ID":"4adc8e3f-786a-4d1b-985b-3f39cf67767a","Type":"ContainerDied","Data":"44de1d63fda97bbeeeb7f56514e89de6c106c87ee188e1587f68f1f2a536bf94"} Oct 11 04:58:50 crc kubenswrapper[4651]: I1011 04:58:50.112983 4651 scope.go:117] "RemoveContainer" containerID="1a9e22811661cdbe2e5722a41440d79a8b4bc220ab96da3b1dcba7c13cb56bbb" Oct 11 04:58:50 crc kubenswrapper[4651]: I1011 04:58:50.113135 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-v5ktt" Oct 11 04:58:50 crc kubenswrapper[4651]: I1011 04:58:50.141241 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-v5ktt"] Oct 11 04:58:50 crc kubenswrapper[4651]: I1011 04:58:50.145209 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-v5ktt"] Oct 11 04:58:50 crc kubenswrapper[4651]: I1011 04:58:50.148938 4651 scope.go:117] "RemoveContainer" containerID="1a9e22811661cdbe2e5722a41440d79a8b4bc220ab96da3b1dcba7c13cb56bbb" Oct 11 04:58:50 crc kubenswrapper[4651]: E1011 04:58:50.149321 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a9e22811661cdbe2e5722a41440d79a8b4bc220ab96da3b1dcba7c13cb56bbb\": container with ID starting with 1a9e22811661cdbe2e5722a41440d79a8b4bc220ab96da3b1dcba7c13cb56bbb not found: ID does not exist" containerID="1a9e22811661cdbe2e5722a41440d79a8b4bc220ab96da3b1dcba7c13cb56bbb" Oct 11 04:58:50 crc kubenswrapper[4651]: I1011 04:58:50.149368 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a9e22811661cdbe2e5722a41440d79a8b4bc220ab96da3b1dcba7c13cb56bbb"} err="failed to get container status \"1a9e22811661cdbe2e5722a41440d79a8b4bc220ab96da3b1dcba7c13cb56bbb\": rpc error: code = NotFound desc = could not find container \"1a9e22811661cdbe2e5722a41440d79a8b4bc220ab96da3b1dcba7c13cb56bbb\": container with ID starting with 1a9e22811661cdbe2e5722a41440d79a8b4bc220ab96da3b1dcba7c13cb56bbb not found: ID does not exist" Oct 11 04:58:51 crc kubenswrapper[4651]: I1011 04:58:51.875462 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4adc8e3f-786a-4d1b-985b-3f39cf67767a" path="/var/lib/kubelet/pods/4adc8e3f-786a-4d1b-985b-3f39cf67767a/volumes" Oct 11 05:00:00 crc kubenswrapper[4651]: I1011 05:00:00.188437 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335980-f69sw"] Oct 11 05:00:00 crc kubenswrapper[4651]: E1011 05:00:00.189402 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4adc8e3f-786a-4d1b-985b-3f39cf67767a" containerName="registry" Oct 11 05:00:00 crc kubenswrapper[4651]: I1011 05:00:00.189420 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="4adc8e3f-786a-4d1b-985b-3f39cf67767a" containerName="registry" Oct 11 05:00:00 crc kubenswrapper[4651]: I1011 05:00:00.189609 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="4adc8e3f-786a-4d1b-985b-3f39cf67767a" containerName="registry" Oct 11 05:00:00 crc kubenswrapper[4651]: I1011 05:00:00.190404 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335980-f69sw" Oct 11 05:00:00 crc kubenswrapper[4651]: I1011 05:00:00.193004 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335980-f69sw"] Oct 11 05:00:00 crc kubenswrapper[4651]: I1011 05:00:00.193210 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 11 05:00:00 crc kubenswrapper[4651]: I1011 05:00:00.193313 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 11 05:00:00 crc kubenswrapper[4651]: I1011 05:00:00.239410 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8efea488-0eb0-460f-bab5-c818de148253-config-volume\") pod \"collect-profiles-29335980-f69sw\" (UID: \"8efea488-0eb0-460f-bab5-c818de148253\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335980-f69sw" Oct 11 05:00:00 crc kubenswrapper[4651]: I1011 05:00:00.240038 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8qbnw\" (UniqueName: \"kubernetes.io/projected/8efea488-0eb0-460f-bab5-c818de148253-kube-api-access-8qbnw\") pod \"collect-profiles-29335980-f69sw\" (UID: \"8efea488-0eb0-460f-bab5-c818de148253\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335980-f69sw" Oct 11 05:00:00 crc kubenswrapper[4651]: I1011 05:00:00.240171 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8efea488-0eb0-460f-bab5-c818de148253-secret-volume\") pod \"collect-profiles-29335980-f69sw\" (UID: \"8efea488-0eb0-460f-bab5-c818de148253\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335980-f69sw" Oct 11 05:00:00 crc kubenswrapper[4651]: I1011 05:00:00.341225 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8efea488-0eb0-460f-bab5-c818de148253-secret-volume\") pod \"collect-profiles-29335980-f69sw\" (UID: \"8efea488-0eb0-460f-bab5-c818de148253\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335980-f69sw" Oct 11 05:00:00 crc kubenswrapper[4651]: I1011 05:00:00.341325 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8efea488-0eb0-460f-bab5-c818de148253-config-volume\") pod \"collect-profiles-29335980-f69sw\" (UID: \"8efea488-0eb0-460f-bab5-c818de148253\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335980-f69sw" Oct 11 05:00:00 crc kubenswrapper[4651]: I1011 05:00:00.341353 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8qbnw\" (UniqueName: \"kubernetes.io/projected/8efea488-0eb0-460f-bab5-c818de148253-kube-api-access-8qbnw\") pod \"collect-profiles-29335980-f69sw\" (UID: \"8efea488-0eb0-460f-bab5-c818de148253\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335980-f69sw" Oct 11 05:00:00 crc kubenswrapper[4651]: I1011 05:00:00.342400 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8efea488-0eb0-460f-bab5-c818de148253-config-volume\") pod 
\"collect-profiles-29335980-f69sw\" (UID: \"8efea488-0eb0-460f-bab5-c818de148253\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335980-f69sw" Oct 11 05:00:00 crc kubenswrapper[4651]: I1011 05:00:00.347999 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8efea488-0eb0-460f-bab5-c818de148253-secret-volume\") pod \"collect-profiles-29335980-f69sw\" (UID: \"8efea488-0eb0-460f-bab5-c818de148253\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335980-f69sw" Oct 11 05:00:00 crc kubenswrapper[4651]: I1011 05:00:00.356147 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8qbnw\" (UniqueName: \"kubernetes.io/projected/8efea488-0eb0-460f-bab5-c818de148253-kube-api-access-8qbnw\") pod \"collect-profiles-29335980-f69sw\" (UID: \"8efea488-0eb0-460f-bab5-c818de148253\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335980-f69sw" Oct 11 05:00:00 crc kubenswrapper[4651]: I1011 05:00:00.518976 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335980-f69sw" Oct 11 05:00:00 crc kubenswrapper[4651]: I1011 05:00:00.673328 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335980-f69sw"] Oct 11 05:00:01 crc kubenswrapper[4651]: I1011 05:00:01.548636 4651 generic.go:334] "Generic (PLEG): container finished" podID="8efea488-0eb0-460f-bab5-c818de148253" containerID="17ca8604931a040d17964403349a1dd93fa8f19eb8708f131e02b8db1b5015d6" exitCode=0 Oct 11 05:00:01 crc kubenswrapper[4651]: I1011 05:00:01.548701 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335980-f69sw" event={"ID":"8efea488-0eb0-460f-bab5-c818de148253","Type":"ContainerDied","Data":"17ca8604931a040d17964403349a1dd93fa8f19eb8708f131e02b8db1b5015d6"} Oct 11 05:00:01 crc kubenswrapper[4651]: I1011 05:00:01.548740 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335980-f69sw" event={"ID":"8efea488-0eb0-460f-bab5-c818de148253","Type":"ContainerStarted","Data":"fff886b742d9a4ecae161d2a7532ed2a9212b46929a61e74ac72b13786caac75"} Oct 11 05:00:02 crc kubenswrapper[4651]: I1011 05:00:02.800795 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335980-f69sw" Oct 11 05:00:02 crc kubenswrapper[4651]: I1011 05:00:02.911557 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8qbnw\" (UniqueName: \"kubernetes.io/projected/8efea488-0eb0-460f-bab5-c818de148253-kube-api-access-8qbnw\") pod \"8efea488-0eb0-460f-bab5-c818de148253\" (UID: \"8efea488-0eb0-460f-bab5-c818de148253\") " Oct 11 05:00:02 crc kubenswrapper[4651]: I1011 05:00:02.911651 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8efea488-0eb0-460f-bab5-c818de148253-config-volume\") pod \"8efea488-0eb0-460f-bab5-c818de148253\" (UID: \"8efea488-0eb0-460f-bab5-c818de148253\") " Oct 11 05:00:02 crc kubenswrapper[4651]: I1011 05:00:02.911765 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8efea488-0eb0-460f-bab5-c818de148253-secret-volume\") pod \"8efea488-0eb0-460f-bab5-c818de148253\" (UID: \"8efea488-0eb0-460f-bab5-c818de148253\") " Oct 11 05:00:02 crc kubenswrapper[4651]: I1011 05:00:02.912705 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8efea488-0eb0-460f-bab5-c818de148253-config-volume" (OuterVolumeSpecName: "config-volume") pod "8efea488-0eb0-460f-bab5-c818de148253" (UID: "8efea488-0eb0-460f-bab5-c818de148253"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:00:02 crc kubenswrapper[4651]: I1011 05:00:02.922321 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8efea488-0eb0-460f-bab5-c818de148253-kube-api-access-8qbnw" (OuterVolumeSpecName: "kube-api-access-8qbnw") pod "8efea488-0eb0-460f-bab5-c818de148253" (UID: "8efea488-0eb0-460f-bab5-c818de148253"). InnerVolumeSpecName "kube-api-access-8qbnw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:00:02 crc kubenswrapper[4651]: I1011 05:00:02.923093 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8efea488-0eb0-460f-bab5-c818de148253-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "8efea488-0eb0-460f-bab5-c818de148253" (UID: "8efea488-0eb0-460f-bab5-c818de148253"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:00:03 crc kubenswrapper[4651]: I1011 05:00:03.013418 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8qbnw\" (UniqueName: \"kubernetes.io/projected/8efea488-0eb0-460f-bab5-c818de148253-kube-api-access-8qbnw\") on node \"crc\" DevicePath \"\"" Oct 11 05:00:03 crc kubenswrapper[4651]: I1011 05:00:03.013477 4651 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8efea488-0eb0-460f-bab5-c818de148253-config-volume\") on node \"crc\" DevicePath \"\"" Oct 11 05:00:03 crc kubenswrapper[4651]: I1011 05:00:03.013491 4651 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8efea488-0eb0-460f-bab5-c818de148253-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 11 05:00:03 crc kubenswrapper[4651]: I1011 05:00:03.561356 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335980-f69sw" event={"ID":"8efea488-0eb0-460f-bab5-c818de148253","Type":"ContainerDied","Data":"fff886b742d9a4ecae161d2a7532ed2a9212b46929a61e74ac72b13786caac75"} Oct 11 05:00:03 crc kubenswrapper[4651]: I1011 05:00:03.561395 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fff886b742d9a4ecae161d2a7532ed2a9212b46929a61e74ac72b13786caac75" Oct 11 05:00:03 crc kubenswrapper[4651]: I1011 05:00:03.561439 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335980-f69sw" Oct 11 05:00:16 crc kubenswrapper[4651]: I1011 05:00:16.309684 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 05:00:16 crc kubenswrapper[4651]: I1011 05:00:16.310244 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 05:00:46 crc kubenswrapper[4651]: I1011 05:00:46.310266 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 05:00:46 crc kubenswrapper[4651]: I1011 05:00:46.311074 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 05:01:03 crc kubenswrapper[4651]: I1011 05:01:03.275853 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-5t4wt"] Oct 11 05:01:03 crc kubenswrapper[4651]: E1011 05:01:03.276708 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8efea488-0eb0-460f-bab5-c818de148253" containerName="collect-profiles" Oct 11 05:01:03 crc 
kubenswrapper[4651]: I1011 05:01:03.276723 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="8efea488-0eb0-460f-bab5-c818de148253" containerName="collect-profiles" Oct 11 05:01:03 crc kubenswrapper[4651]: I1011 05:01:03.276867 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="8efea488-0eb0-460f-bab5-c818de148253" containerName="collect-profiles" Oct 11 05:01:03 crc kubenswrapper[4651]: I1011 05:01:03.277248 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-5t4wt" Oct 11 05:01:03 crc kubenswrapper[4651]: I1011 05:01:03.278954 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Oct 11 05:01:03 crc kubenswrapper[4651]: I1011 05:01:03.285606 4651 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-bpxb5" Oct 11 05:01:03 crc kubenswrapper[4651]: I1011 05:01:03.289647 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-pmm46"] Oct 11 05:01:03 crc kubenswrapper[4651]: I1011 05:01:03.291637 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Oct 11 05:01:03 crc kubenswrapper[4651]: I1011 05:01:03.293686 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-pmm46" Oct 11 05:01:03 crc kubenswrapper[4651]: I1011 05:01:03.298044 4651 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-82s9c" Oct 11 05:01:03 crc kubenswrapper[4651]: I1011 05:01:03.322097 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-5t4wt"] Oct 11 05:01:03 crc kubenswrapper[4651]: I1011 05:01:03.328170 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-pmm46"] Oct 11 05:01:03 crc kubenswrapper[4651]: I1011 05:01:03.332714 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-l859c"] Oct 11 05:01:03 crc kubenswrapper[4651]: I1011 05:01:03.333437 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-l859c" Oct 11 05:01:03 crc kubenswrapper[4651]: I1011 05:01:03.336682 4651 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-ncvb9" Oct 11 05:01:03 crc kubenswrapper[4651]: I1011 05:01:03.338547 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-l859c"] Oct 11 05:01:03 crc kubenswrapper[4651]: I1011 05:01:03.395803 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t252f\" (UniqueName: \"kubernetes.io/projected/4045e2c1-af15-42ee-bfee-f72d32924237-kube-api-access-t252f\") pod \"cert-manager-cainjector-7f985d654d-5t4wt\" (UID: \"4045e2c1-af15-42ee-bfee-f72d32924237\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-5t4wt" Oct 11 05:01:03 crc kubenswrapper[4651]: I1011 05:01:03.395939 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68txp\" (UniqueName: \"kubernetes.io/projected/d483b46d-edb9-4b36-b0a4-3c959e0f6aca-kube-api-access-68txp\") pod \"cert-manager-5b446d88c5-pmm46\" (UID: \"d483b46d-edb9-4b36-b0a4-3c959e0f6aca\") " pod="cert-manager/cert-manager-5b446d88c5-pmm46" Oct 11 05:01:03 crc kubenswrapper[4651]: I1011 05:01:03.497142 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzk84\" (UniqueName: \"kubernetes.io/projected/54d95539-bc93-45d6-a26a-95284f123cde-kube-api-access-wzk84\") pod \"cert-manager-webhook-5655c58dd6-l859c\" (UID: \"54d95539-bc93-45d6-a26a-95284f123cde\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-l859c" Oct 11 05:01:03 crc kubenswrapper[4651]: I1011 05:01:03.497222 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t252f\" (UniqueName: \"kubernetes.io/projected/4045e2c1-af15-42ee-bfee-f72d32924237-kube-api-access-t252f\") pod \"cert-manager-cainjector-7f985d654d-5t4wt\" (UID: \"4045e2c1-af15-42ee-bfee-f72d32924237\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-5t4wt" Oct 11 05:01:03 crc kubenswrapper[4651]: I1011 05:01:03.497279 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68txp\" (UniqueName: \"kubernetes.io/projected/d483b46d-edb9-4b36-b0a4-3c959e0f6aca-kube-api-access-68txp\") pod \"cert-manager-5b446d88c5-pmm46\" (UID: \"d483b46d-edb9-4b36-b0a4-3c959e0f6aca\") " pod="cert-manager/cert-manager-5b446d88c5-pmm46" Oct 11 05:01:03 crc kubenswrapper[4651]: I1011 05:01:03.519138 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68txp\" (UniqueName: \"kubernetes.io/projected/d483b46d-edb9-4b36-b0a4-3c959e0f6aca-kube-api-access-68txp\") pod \"cert-manager-5b446d88c5-pmm46\" (UID: \"d483b46d-edb9-4b36-b0a4-3c959e0f6aca\") " pod="cert-manager/cert-manager-5b446d88c5-pmm46" Oct 11 05:01:03 crc kubenswrapper[4651]: I1011 05:01:03.519482 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t252f\" (UniqueName: \"kubernetes.io/projected/4045e2c1-af15-42ee-bfee-f72d32924237-kube-api-access-t252f\") pod \"cert-manager-cainjector-7f985d654d-5t4wt\" (UID: \"4045e2c1-af15-42ee-bfee-f72d32924237\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-5t4wt" Oct 11 05:01:03 crc kubenswrapper[4651]: I1011 05:01:03.598768 4651 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-wzk84\" (UniqueName: \"kubernetes.io/projected/54d95539-bc93-45d6-a26a-95284f123cde-kube-api-access-wzk84\") pod \"cert-manager-webhook-5655c58dd6-l859c\" (UID: \"54d95539-bc93-45d6-a26a-95284f123cde\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-l859c" Oct 11 05:01:03 crc kubenswrapper[4651]: I1011 05:01:03.622693 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzk84\" (UniqueName: \"kubernetes.io/projected/54d95539-bc93-45d6-a26a-95284f123cde-kube-api-access-wzk84\") pod \"cert-manager-webhook-5655c58dd6-l859c\" (UID: \"54d95539-bc93-45d6-a26a-95284f123cde\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-l859c" Oct 11 05:01:03 crc kubenswrapper[4651]: I1011 05:01:03.624559 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-5t4wt" Oct 11 05:01:03 crc kubenswrapper[4651]: I1011 05:01:03.631465 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-pmm46" Oct 11 05:01:03 crc kubenswrapper[4651]: I1011 05:01:03.645713 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-l859c" Oct 11 05:01:03 crc kubenswrapper[4651]: I1011 05:01:03.857180 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-pmm46"] Oct 11 05:01:03 crc kubenswrapper[4651]: I1011 05:01:03.874236 4651 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 11 05:01:03 crc kubenswrapper[4651]: I1011 05:01:03.891611 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-5t4wt"] Oct 11 05:01:03 crc kubenswrapper[4651]: I1011 05:01:03.941375 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-l859c"] Oct 11 05:01:03 crc kubenswrapper[4651]: W1011 05:01:03.945709 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod54d95539_bc93_45d6_a26a_95284f123cde.slice/crio-fd11aec06249be033880fbd77afede6b4788c18fd1e43c2aafbbd17d3c26acfd WatchSource:0}: Error finding container fd11aec06249be033880fbd77afede6b4788c18fd1e43c2aafbbd17d3c26acfd: Status 404 returned error can't find the container with id fd11aec06249be033880fbd77afede6b4788c18fd1e43c2aafbbd17d3c26acfd Oct 11 05:01:03 crc kubenswrapper[4651]: I1011 05:01:03.963862 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-5t4wt" event={"ID":"4045e2c1-af15-42ee-bfee-f72d32924237","Type":"ContainerStarted","Data":"3b8ba8ac49f365f798d9bfc0dbfc3a596a6a6a3eac14d7947ef669f9e9aa24d6"} Oct 11 05:01:03 crc kubenswrapper[4651]: I1011 05:01:03.964810 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-l859c" event={"ID":"54d95539-bc93-45d6-a26a-95284f123cde","Type":"ContainerStarted","Data":"fd11aec06249be033880fbd77afede6b4788c18fd1e43c2aafbbd17d3c26acfd"} Oct 11 05:01:03 crc kubenswrapper[4651]: I1011 05:01:03.966305 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-pmm46" event={"ID":"d483b46d-edb9-4b36-b0a4-3c959e0f6aca","Type":"ContainerStarted","Data":"3dc233cfb1e140e9579679d93fbdaf86685ea4ac75642c54a9f955cb60d36b18"} Oct 11 05:01:07 crc kubenswrapper[4651]: I1011 
05:01:07.986664 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-l859c" event={"ID":"54d95539-bc93-45d6-a26a-95284f123cde","Type":"ContainerStarted","Data":"11ce3fbed5b18d8b0ac2c88c89baae25bec57d282c1e6793753b3ba8a9066002"} Oct 11 05:01:07 crc kubenswrapper[4651]: I1011 05:01:07.988056 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-l859c" Oct 11 05:01:07 crc kubenswrapper[4651]: I1011 05:01:07.989908 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-pmm46" event={"ID":"d483b46d-edb9-4b36-b0a4-3c959e0f6aca","Type":"ContainerStarted","Data":"b5cbe9437cbba739164419aca4992dfee9c9d76d12a79a002dca4cfa896daebd"} Oct 11 05:01:07 crc kubenswrapper[4651]: I1011 05:01:07.992008 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-5t4wt" event={"ID":"4045e2c1-af15-42ee-bfee-f72d32924237","Type":"ContainerStarted","Data":"aa6525826e255832396568cb20d3f462ff9eed6d9039c28f9fb62c2e6277da3c"} Oct 11 05:01:08 crc kubenswrapper[4651]: I1011 05:01:08.023496 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-5t4wt" podStartSLOduration=1.9854248989999999 podStartE2EDuration="5.02347974s" podCreationTimestamp="2025-10-11 05:01:03 +0000 UTC" firstStartedPulling="2025-10-11 05:01:03.903459171 +0000 UTC m=+584.799691967" lastFinishedPulling="2025-10-11 05:01:06.941514012 +0000 UTC m=+587.837746808" observedRunningTime="2025-10-11 05:01:08.019962413 +0000 UTC m=+588.916195249" watchObservedRunningTime="2025-10-11 05:01:08.02347974 +0000 UTC m=+588.919712526" Oct 11 05:01:08 crc kubenswrapper[4651]: I1011 05:01:08.024886 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-l859c" podStartSLOduration=2.096119552 podStartE2EDuration="5.024879675s" podCreationTimestamp="2025-10-11 05:01:03 +0000 UTC" firstStartedPulling="2025-10-11 05:01:03.948023809 +0000 UTC m=+584.844256605" lastFinishedPulling="2025-10-11 05:01:06.876783942 +0000 UTC m=+587.773016728" observedRunningTime="2025-10-11 05:01:08.007249287 +0000 UTC m=+588.903482093" watchObservedRunningTime="2025-10-11 05:01:08.024879675 +0000 UTC m=+588.921112471" Oct 11 05:01:08 crc kubenswrapper[4651]: I1011 05:01:08.039339 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-pmm46" podStartSLOduration=2.036521508 podStartE2EDuration="5.039322814s" podCreationTimestamp="2025-10-11 05:01:03 +0000 UTC" firstStartedPulling="2025-10-11 05:01:03.873987636 +0000 UTC m=+584.770220432" lastFinishedPulling="2025-10-11 05:01:06.876788942 +0000 UTC m=+587.773021738" observedRunningTime="2025-10-11 05:01:08.038863833 +0000 UTC m=+588.935096689" watchObservedRunningTime="2025-10-11 05:01:08.039322814 +0000 UTC m=+588.935555610" Oct 11 05:01:13 crc kubenswrapper[4651]: I1011 05:01:13.650129 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-l859c" Oct 11 05:01:13 crc kubenswrapper[4651]: I1011 05:01:13.723369 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-6zt9s"] Oct 11 05:01:13 crc kubenswrapper[4651]: I1011 05:01:13.724267 4651 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="ovn-controller" containerID="cri-o://99f927067aabe4690e99fe50a069afdf68b22d950a8ae141235571ced468c44e" gracePeriod=30 Oct 11 05:01:13 crc kubenswrapper[4651]: I1011 05:01:13.724318 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://788d6a46d740de869fe7cd1d28479f8115a3515c7bd2482e06cbc4dba7b27fac" gracePeriod=30 Oct 11 05:01:13 crc kubenswrapper[4651]: I1011 05:01:13.724324 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="nbdb" containerID="cri-o://68954082d4369b9b4d95ba1f15e19593dc330b698562be413055b2ba012b5f8c" gracePeriod=30 Oct 11 05:01:13 crc kubenswrapper[4651]: I1011 05:01:13.724393 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="northd" containerID="cri-o://3eed8335754437c935f07223bf4c4f40f9ab0bbdc9a86b7610f7f6fd55140e37" gracePeriod=30 Oct 11 05:01:13 crc kubenswrapper[4651]: I1011 05:01:13.724467 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="sbdb" containerID="cri-o://add3ed68e0bf59bc569248a3660cfc9a20995641ec65b0bf7133e47c366aad73" gracePeriod=30 Oct 11 05:01:13 crc kubenswrapper[4651]: I1011 05:01:13.724490 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="kube-rbac-proxy-node" containerID="cri-o://0fd0aaae2760a1c934aed54b8e07528f0f78db1149c9e1f8674bec90b61a7078" gracePeriod=30 Oct 11 05:01:13 crc kubenswrapper[4651]: I1011 05:01:13.724540 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="ovn-acl-logging" containerID="cri-o://23b3fde15e637081cbf5fad2e55ccea1f0fe63d39ff5f82a60ce05ffeef9a837" gracePeriod=30 Oct 11 05:01:13 crc kubenswrapper[4651]: I1011 05:01:13.764434 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="ovnkube-controller" containerID="cri-o://ff59b524a1c2589496d6c2c71a6ce2ad3b7b309b0e6e25f0869d10c12a5a877e" gracePeriod=30 Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.033783 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wz4hw_fbfdd781-994b-49b4-9c8e-edc0ea4145d1/kube-multus/2.log" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.034331 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wz4hw_fbfdd781-994b-49b4-9c8e-edc0ea4145d1/kube-multus/1.log" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.034389 4651 generic.go:334] "Generic (PLEG): container finished" podID="fbfdd781-994b-49b4-9c8e-edc0ea4145d1" containerID="593f6c9a7505c4ee7c5c917e6b485fb997517f68d952455303c896adae9f2391" exitCode=2 Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.034485 4651 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wz4hw" event={"ID":"fbfdd781-994b-49b4-9c8e-edc0ea4145d1","Type":"ContainerDied","Data":"593f6c9a7505c4ee7c5c917e6b485fb997517f68d952455303c896adae9f2391"} Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.034575 4651 scope.go:117] "RemoveContainer" containerID="453b8d5da6858078639895d4d19bb1783eefeeb7c558eec6984f6ebdadd5d8fc" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.035480 4651 scope.go:117] "RemoveContainer" containerID="593f6c9a7505c4ee7c5c917e6b485fb997517f68d952455303c896adae9f2391" Oct 11 05:01:14 crc kubenswrapper[4651]: E1011 05:01:14.035886 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-wz4hw_openshift-multus(fbfdd781-994b-49b4-9c8e-edc0ea4145d1)\"" pod="openshift-multus/multus-wz4hw" podUID="fbfdd781-994b-49b4-9c8e-edc0ea4145d1" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.036746 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6zt9s_28e01c08-a461-4f44-a49c-4bf92fd3a2ce/ovnkube-controller/3.log" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.039441 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6zt9s_28e01c08-a461-4f44-a49c-4bf92fd3a2ce/ovn-acl-logging/0.log" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.040299 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6zt9s_28e01c08-a461-4f44-a49c-4bf92fd3a2ce/ovn-controller/0.log" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.041165 4651 generic.go:334] "Generic (PLEG): container finished" podID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerID="ff59b524a1c2589496d6c2c71a6ce2ad3b7b309b0e6e25f0869d10c12a5a877e" exitCode=0 Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.041646 4651 generic.go:334] "Generic (PLEG): container finished" podID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerID="add3ed68e0bf59bc569248a3660cfc9a20995641ec65b0bf7133e47c366aad73" exitCode=0 Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.041724 4651 generic.go:334] "Generic (PLEG): container finished" podID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerID="68954082d4369b9b4d95ba1f15e19593dc330b698562be413055b2ba012b5f8c" exitCode=0 Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.041740 4651 generic.go:334] "Generic (PLEG): container finished" podID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerID="3eed8335754437c935f07223bf4c4f40f9ab0bbdc9a86b7610f7f6fd55140e37" exitCode=0 Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.041750 4651 generic.go:334] "Generic (PLEG): container finished" podID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerID="788d6a46d740de869fe7cd1d28479f8115a3515c7bd2482e06cbc4dba7b27fac" exitCode=0 Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.041759 4651 generic.go:334] "Generic (PLEG): container finished" podID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerID="0fd0aaae2760a1c934aed54b8e07528f0f78db1149c9e1f8674bec90b61a7078" exitCode=0 Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.041788 4651 generic.go:334] "Generic (PLEG): container finished" podID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerID="23b3fde15e637081cbf5fad2e55ccea1f0fe63d39ff5f82a60ce05ffeef9a837" exitCode=143 Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.041801 
4651 generic.go:334] "Generic (PLEG): container finished" podID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerID="99f927067aabe4690e99fe50a069afdf68b22d950a8ae141235571ced468c44e" exitCode=143 Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.041844 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" event={"ID":"28e01c08-a461-4f44-a49c-4bf92fd3a2ce","Type":"ContainerDied","Data":"ff59b524a1c2589496d6c2c71a6ce2ad3b7b309b0e6e25f0869d10c12a5a877e"} Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.041878 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" event={"ID":"28e01c08-a461-4f44-a49c-4bf92fd3a2ce","Type":"ContainerDied","Data":"add3ed68e0bf59bc569248a3660cfc9a20995641ec65b0bf7133e47c366aad73"} Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.041899 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" event={"ID":"28e01c08-a461-4f44-a49c-4bf92fd3a2ce","Type":"ContainerDied","Data":"68954082d4369b9b4d95ba1f15e19593dc330b698562be413055b2ba012b5f8c"} Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.041912 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" event={"ID":"28e01c08-a461-4f44-a49c-4bf92fd3a2ce","Type":"ContainerDied","Data":"3eed8335754437c935f07223bf4c4f40f9ab0bbdc9a86b7610f7f6fd55140e37"} Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.041923 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" event={"ID":"28e01c08-a461-4f44-a49c-4bf92fd3a2ce","Type":"ContainerDied","Data":"788d6a46d740de869fe7cd1d28479f8115a3515c7bd2482e06cbc4dba7b27fac"} Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.041934 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" event={"ID":"28e01c08-a461-4f44-a49c-4bf92fd3a2ce","Type":"ContainerDied","Data":"0fd0aaae2760a1c934aed54b8e07528f0f78db1149c9e1f8674bec90b61a7078"} Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.041946 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" event={"ID":"28e01c08-a461-4f44-a49c-4bf92fd3a2ce","Type":"ContainerDied","Data":"23b3fde15e637081cbf5fad2e55ccea1f0fe63d39ff5f82a60ce05ffeef9a837"} Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.041960 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" event={"ID":"28e01c08-a461-4f44-a49c-4bf92fd3a2ce","Type":"ContainerDied","Data":"99f927067aabe4690e99fe50a069afdf68b22d950a8ae141235571ced468c44e"} Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.041975 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" event={"ID":"28e01c08-a461-4f44-a49c-4bf92fd3a2ce","Type":"ContainerDied","Data":"65db520198da135ed934537a30d0613d7c412e2bf9d161335159f0654ffcb29a"} Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.041987 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="65db520198da135ed934537a30d0613d7c412e2bf9d161335159f0654ffcb29a" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.058194 4651 scope.go:117] "RemoveContainer" containerID="bedbf9638749a3e9eca8260d325aaa6064bbb0ce8ea6e1cd6042e0039175c968" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.066509 4651 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6zt9s_28e01c08-a461-4f44-a49c-4bf92fd3a2ce/ovn-acl-logging/0.log" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.067680 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6zt9s_28e01c08-a461-4f44-a49c-4bf92fd3a2ce/ovn-controller/0.log" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.069423 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.116256 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-xt42g"] Oct 11 05:01:14 crc kubenswrapper[4651]: E1011 05:01:14.116554 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="ovnkube-controller" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.116582 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="ovnkube-controller" Oct 11 05:01:14 crc kubenswrapper[4651]: E1011 05:01:14.116597 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="kubecfg-setup" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.116608 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="kubecfg-setup" Oct 11 05:01:14 crc kubenswrapper[4651]: E1011 05:01:14.116618 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="nbdb" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.116630 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="nbdb" Oct 11 05:01:14 crc kubenswrapper[4651]: E1011 05:01:14.116647 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="sbdb" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.116657 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="sbdb" Oct 11 05:01:14 crc kubenswrapper[4651]: E1011 05:01:14.116672 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="ovnkube-controller" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.116683 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="ovnkube-controller" Oct 11 05:01:14 crc kubenswrapper[4651]: E1011 05:01:14.116702 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="northd" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.116711 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="northd" Oct 11 05:01:14 crc kubenswrapper[4651]: E1011 05:01:14.116722 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="kube-rbac-proxy-node" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.116731 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="kube-rbac-proxy-node" Oct 11 05:01:14 crc kubenswrapper[4651]: E1011 05:01:14.116744 4651 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="ovn-acl-logging" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.116757 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="ovn-acl-logging" Oct 11 05:01:14 crc kubenswrapper[4651]: E1011 05:01:14.116770 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="ovn-controller" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.116782 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="ovn-controller" Oct 11 05:01:14 crc kubenswrapper[4651]: E1011 05:01:14.116799 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="kube-rbac-proxy-ovn-metrics" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.116808 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="kube-rbac-proxy-ovn-metrics" Oct 11 05:01:14 crc kubenswrapper[4651]: E1011 05:01:14.116843 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="ovnkube-controller" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.116854 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="ovnkube-controller" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.117009 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="ovnkube-controller" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.117027 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="sbdb" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.117040 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="ovn-acl-logging" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.117055 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="ovn-controller" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.117072 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="kube-rbac-proxy-ovn-metrics" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.117083 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="ovnkube-controller" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.117095 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="ovnkube-controller" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.117106 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="nbdb" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.117115 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="ovnkube-controller" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.117126 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="northd" Oct 
11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.117142 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="kube-rbac-proxy-node" Oct 11 05:01:14 crc kubenswrapper[4651]: E1011 05:01:14.117279 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="ovnkube-controller" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.117289 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="ovnkube-controller" Oct 11 05:01:14 crc kubenswrapper[4651]: E1011 05:01:14.117302 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="ovnkube-controller" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.117310 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="ovnkube-controller" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.117422 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" containerName="ovnkube-controller" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.120236 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.247620 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-cni-netd\") pod \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.247679 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-run-netns\") pod \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.247717 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-ovn-node-metrics-cert\") pod \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.247743 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-systemd-units\") pod \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.247773 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-ovnkube-config\") pod \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.247794 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-run-ovn\") pod \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " Oct 11 05:01:14 crc 
kubenswrapper[4651]: I1011 05:01:14.247785 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "28e01c08-a461-4f44-a49c-4bf92fd3a2ce" (UID: "28e01c08-a461-4f44-a49c-4bf92fd3a2ce"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.247812 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-log-socket\") pod \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.247866 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-log-socket" (OuterVolumeSpecName: "log-socket") pod "28e01c08-a461-4f44-a49c-4bf92fd3a2ce" (UID: "28e01c08-a461-4f44-a49c-4bf92fd3a2ce"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.247894 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "28e01c08-a461-4f44-a49c-4bf92fd3a2ce" (UID: "28e01c08-a461-4f44-a49c-4bf92fd3a2ce"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.247922 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-cni-bin\") pod \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.247910 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "28e01c08-a461-4f44-a49c-4bf92fd3a2ce" (UID: "28e01c08-a461-4f44-a49c-4bf92fd3a2ce"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.247955 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-kubelet\") pod \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.247965 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "28e01c08-a461-4f44-a49c-4bf92fd3a2ce" (UID: "28e01c08-a461-4f44-a49c-4bf92fd3a2ce"). InnerVolumeSpecName "host-cni-bin". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.247993 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-node-log\") pod \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.248019 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-run-systemd\") pod \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.248021 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "28e01c08-a461-4f44-a49c-4bf92fd3a2ce" (UID: "28e01c08-a461-4f44-a49c-4bf92fd3a2ce"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.248051 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-var-lib-cni-networks-ovn-kubernetes\") pod \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.248094 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-ovnkube-script-lib\") pod \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.248130 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-run-ovn-kubernetes\") pod \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.248053 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-node-log" (OuterVolumeSpecName: "node-log") pod "28e01c08-a461-4f44-a49c-4bf92fd3a2ce" (UID: "28e01c08-a461-4f44-a49c-4bf92fd3a2ce"). InnerVolumeSpecName "node-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.248155 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-etc-openvswitch\") pod \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.248186 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9tctk\" (UniqueName: \"kubernetes.io/projected/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-kube-api-access-9tctk\") pod \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.248223 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-env-overrides\") pod \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.248248 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-slash\") pod \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.248275 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-run-openvswitch\") pod \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.248332 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-var-lib-openvswitch\") pod \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\" (UID: \"28e01c08-a461-4f44-a49c-4bf92fd3a2ce\") " Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.248186 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "28e01c08-a461-4f44-a49c-4bf92fd3a2ce" (UID: "28e01c08-a461-4f44-a49c-4bf92fd3a2ce"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.248216 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "28e01c08-a461-4f44-a49c-4bf92fd3a2ce" (UID: "28e01c08-a461-4f44-a49c-4bf92fd3a2ce"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.248482 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "28e01c08-a461-4f44-a49c-4bf92fd3a2ce" (UID: "28e01c08-a461-4f44-a49c-4bf92fd3a2ce"). 
InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.248482 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-host-cni-bin\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.248497 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "28e01c08-a461-4f44-a49c-4bf92fd3a2ce" (UID: "28e01c08-a461-4f44-a49c-4bf92fd3a2ce"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.248542 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-host-run-ovn-kubernetes\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.248571 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-run-openvswitch\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.248605 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-host-kubelet\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.248628 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-run-ovn\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.248661 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-run-systemd\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.248676 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-node-log\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.248684 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "28e01c08-a461-4f44-a49c-4bf92fd3a2ce" (UID: "28e01c08-a461-4f44-a49c-4bf92fd3a2ce"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.248709 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-host-run-netns\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.248708 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "28e01c08-a461-4f44-a49c-4bf92fd3a2ce" (UID: "28e01c08-a461-4f44-a49c-4bf92fd3a2ce"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.248752 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "28e01c08-a461-4f44-a49c-4bf92fd3a2ce" (UID: "28e01c08-a461-4f44-a49c-4bf92fd3a2ce"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.248778 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-slash" (OuterVolumeSpecName: "host-slash") pod "28e01c08-a461-4f44-a49c-4bf92fd3a2ce" (UID: "28e01c08-a461-4f44-a49c-4bf92fd3a2ce"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.248840 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/cdf78a22-d9f2-473a-9309-3863ff7eba70-ovnkube-script-lib\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.248872 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "28e01c08-a461-4f44-a49c-4bf92fd3a2ce" (UID: "28e01c08-a461-4f44-a49c-4bf92fd3a2ce"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.248877 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-var-lib-openvswitch\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.248901 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "28e01c08-a461-4f44-a49c-4bf92fd3a2ce" (UID: "28e01c08-a461-4f44-a49c-4bf92fd3a2ce"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.248987 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-host-slash\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.249009 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.249084 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/cdf78a22-d9f2-473a-9309-3863ff7eba70-env-overrides\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.249133 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssbx7\" (UniqueName: \"kubernetes.io/projected/cdf78a22-d9f2-473a-9309-3863ff7eba70-kube-api-access-ssbx7\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.249154 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/cdf78a22-d9f2-473a-9309-3863ff7eba70-ovn-node-metrics-cert\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.249183 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-systemd-units\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.249283 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/cdf78a22-d9f2-473a-9309-3863ff7eba70-ovnkube-config\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.249402 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-log-socket\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.249467 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-host-cni-netd\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.249507 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-etc-openvswitch\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.249629 4651 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-cni-netd\") on node \"crc\" DevicePath \"\"" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.249646 4651 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-run-netns\") on node \"crc\" DevicePath \"\"" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.249657 4651 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-systemd-units\") on node \"crc\" DevicePath \"\"" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.249666 4651 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.249677 4651 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.249687 4651 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-log-socket\") on node \"crc\" DevicePath \"\"" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.249696 4651 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-cni-bin\") on node \"crc\" DevicePath \"\"" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.249705 4651 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-kubelet\") on node \"crc\" DevicePath 
\"\"" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.249715 4651 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-node-log\") on node \"crc\" DevicePath \"\"" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.249726 4651 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.249740 4651 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.249753 4651 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.249764 4651 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.249775 4651 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.249784 4651 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-host-slash\") on node \"crc\" DevicePath \"\"" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.249794 4651 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-run-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.249804 4651 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.253107 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-kube-api-access-9tctk" (OuterVolumeSpecName: "kube-api-access-9tctk") pod "28e01c08-a461-4f44-a49c-4bf92fd3a2ce" (UID: "28e01c08-a461-4f44-a49c-4bf92fd3a2ce"). InnerVolumeSpecName "kube-api-access-9tctk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.255211 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "28e01c08-a461-4f44-a49c-4bf92fd3a2ce" (UID: "28e01c08-a461-4f44-a49c-4bf92fd3a2ce"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.261018 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "28e01c08-a461-4f44-a49c-4bf92fd3a2ce" (UID: "28e01c08-a461-4f44-a49c-4bf92fd3a2ce"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.351314 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-host-slash\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.351393 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.351459 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/cdf78a22-d9f2-473a-9309-3863ff7eba70-env-overrides\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.351522 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssbx7\" (UniqueName: \"kubernetes.io/projected/cdf78a22-d9f2-473a-9309-3863ff7eba70-kube-api-access-ssbx7\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.351553 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/cdf78a22-d9f2-473a-9309-3863ff7eba70-ovn-node-metrics-cert\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.351585 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-systemd-units\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.351613 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/cdf78a22-d9f2-473a-9309-3863ff7eba70-ovnkube-config\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.351606 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-host-slash\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") 
" pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.351737 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.351646 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-log-socket\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.352425 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-host-cni-netd\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.352491 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-etc-openvswitch\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.352530 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-host-cni-bin\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.352569 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/cdf78a22-d9f2-473a-9309-3863ff7eba70-env-overrides\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.352594 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-host-run-ovn-kubernetes\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.352647 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-run-openvswitch\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.352697 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-host-kubelet\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 
05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.352736 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-run-ovn\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.352790 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-run-systemd\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.352865 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-node-log\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.352921 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-host-run-netns\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.352974 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-var-lib-openvswitch\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.353007 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/cdf78a22-d9f2-473a-9309-3863ff7eba70-ovnkube-script-lib\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.353535 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/cdf78a22-d9f2-473a-9309-3863ff7eba70-ovnkube-config\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.353611 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-log-socket\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.353655 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9tctk\" (UniqueName: \"kubernetes.io/projected/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-kube-api-access-9tctk\") on node \"crc\" DevicePath \"\"" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.353696 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: 
\"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-systemd-units\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.353720 4651 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.353746 4651 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/28e01c08-a461-4f44-a49c-4bf92fd3a2ce-run-systemd\") on node \"crc\" DevicePath \"\"" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.353852 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-host-kubelet\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.353898 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-host-cni-netd\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.353990 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-etc-openvswitch\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.354048 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-host-cni-bin\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.354092 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-host-run-ovn-kubernetes\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.354134 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-run-openvswitch\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.354179 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-node-log\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.354741 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-var-lib-openvswitch\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.354781 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-run-ovn\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.354801 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-run-systemd\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.354834 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/cdf78a22-d9f2-473a-9309-3863ff7eba70-host-run-netns\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.354888 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/cdf78a22-d9f2-473a-9309-3863ff7eba70-ovn-node-metrics-cert\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.355574 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/cdf78a22-d9f2-473a-9309-3863ff7eba70-ovnkube-script-lib\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.378583 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssbx7\" (UniqueName: \"kubernetes.io/projected/cdf78a22-d9f2-473a-9309-3863ff7eba70-kube-api-access-ssbx7\") pod \"ovnkube-node-xt42g\" (UID: \"cdf78a22-d9f2-473a-9309-3863ff7eba70\") " pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: I1011 05:01:14.434658 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:14 crc kubenswrapper[4651]: W1011 05:01:14.458267 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcdf78a22_d9f2_473a_9309_3863ff7eba70.slice/crio-974b2e12b573a620242893641085eea1fa9933de06a3f497f36d70232de6e319 WatchSource:0}: Error finding container 974b2e12b573a620242893641085eea1fa9933de06a3f497f36d70232de6e319: Status 404 returned error can't find the container with id 974b2e12b573a620242893641085eea1fa9933de06a3f497f36d70232de6e319 Oct 11 05:01:15 crc kubenswrapper[4651]: I1011 05:01:15.053450 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6zt9s_28e01c08-a461-4f44-a49c-4bf92fd3a2ce/ovn-acl-logging/0.log" Oct 11 05:01:15 crc kubenswrapper[4651]: I1011 05:01:15.054246 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6zt9s_28e01c08-a461-4f44-a49c-4bf92fd3a2ce/ovn-controller/0.log" Oct 11 05:01:15 crc kubenswrapper[4651]: I1011 05:01:15.054984 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-6zt9s" Oct 11 05:01:15 crc kubenswrapper[4651]: I1011 05:01:15.056876 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wz4hw_fbfdd781-994b-49b4-9c8e-edc0ea4145d1/kube-multus/2.log" Oct 11 05:01:15 crc kubenswrapper[4651]: I1011 05:01:15.059056 4651 generic.go:334] "Generic (PLEG): container finished" podID="cdf78a22-d9f2-473a-9309-3863ff7eba70" containerID="eec16106f3e0feea9413610e62ad3e534f1b76f0c406ef29ab07007e8d6507a4" exitCode=0 Oct 11 05:01:15 crc kubenswrapper[4651]: I1011 05:01:15.059129 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" event={"ID":"cdf78a22-d9f2-473a-9309-3863ff7eba70","Type":"ContainerDied","Data":"eec16106f3e0feea9413610e62ad3e534f1b76f0c406ef29ab07007e8d6507a4"} Oct 11 05:01:15 crc kubenswrapper[4651]: I1011 05:01:15.059188 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" event={"ID":"cdf78a22-d9f2-473a-9309-3863ff7eba70","Type":"ContainerStarted","Data":"974b2e12b573a620242893641085eea1fa9933de06a3f497f36d70232de6e319"} Oct 11 05:01:15 crc kubenswrapper[4651]: I1011 05:01:15.176666 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-6zt9s"] Oct 11 05:01:15 crc kubenswrapper[4651]: I1011 05:01:15.181139 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-6zt9s"] Oct 11 05:01:15 crc kubenswrapper[4651]: I1011 05:01:15.877355 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28e01c08-a461-4f44-a49c-4bf92fd3a2ce" path="/var/lib/kubelet/pods/28e01c08-a461-4f44-a49c-4bf92fd3a2ce/volumes" Oct 11 05:01:16 crc kubenswrapper[4651]: I1011 05:01:16.068026 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" event={"ID":"cdf78a22-d9f2-473a-9309-3863ff7eba70","Type":"ContainerStarted","Data":"4fc94364c415a7c28880eac46745433d94c871a095adcee6dbf51b6a32ccda6e"} Oct 11 05:01:16 crc kubenswrapper[4651]: I1011 05:01:16.068914 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" 
event={"ID":"cdf78a22-d9f2-473a-9309-3863ff7eba70","Type":"ContainerStarted","Data":"c219c9f69b93c5e138e24d71499b239104b578542f665f331edc0658d95f892b"} Oct 11 05:01:16 crc kubenswrapper[4651]: I1011 05:01:16.068943 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" event={"ID":"cdf78a22-d9f2-473a-9309-3863ff7eba70","Type":"ContainerStarted","Data":"3ef5918d8e7ebe3d0816f47fc755e0c655d21e9bb82d3a774298244558420afa"} Oct 11 05:01:16 crc kubenswrapper[4651]: I1011 05:01:16.068957 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" event={"ID":"cdf78a22-d9f2-473a-9309-3863ff7eba70","Type":"ContainerStarted","Data":"58b4dfa8d0b40c60c607b843c554a3a59bde5d220fae989d931279beb72b73da"} Oct 11 05:01:16 crc kubenswrapper[4651]: I1011 05:01:16.068968 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" event={"ID":"cdf78a22-d9f2-473a-9309-3863ff7eba70","Type":"ContainerStarted","Data":"fa164aada502e21b07892160a8411cdfe39173a0747b0f7b5e956e85fa0b08a0"} Oct 11 05:01:16 crc kubenswrapper[4651]: I1011 05:01:16.068980 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" event={"ID":"cdf78a22-d9f2-473a-9309-3863ff7eba70","Type":"ContainerStarted","Data":"215d4400aa6cc04ed53ef01aa88ee92c7f90ee2e83ac7f3f18427c8ac522f798"} Oct 11 05:01:16 crc kubenswrapper[4651]: I1011 05:01:16.313390 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 05:01:16 crc kubenswrapper[4651]: I1011 05:01:16.313465 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 05:01:16 crc kubenswrapper[4651]: I1011 05:01:16.313516 4651 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" Oct 11 05:01:16 crc kubenswrapper[4651]: I1011 05:01:16.314212 4651 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4c4da425137942b26402e87d800a133130761ddfde8b5ad1911ac803f8d0758d"} pod="openshift-machine-config-operator/machine-config-daemon-78jnv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 11 05:01:16 crc kubenswrapper[4651]: I1011 05:01:16.314280 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" containerID="cri-o://4c4da425137942b26402e87d800a133130761ddfde8b5ad1911ac803f8d0758d" gracePeriod=600 Oct 11 05:01:17 crc kubenswrapper[4651]: I1011 05:01:17.079179 4651 generic.go:334] "Generic (PLEG): container finished" podID="519a1ae1-e964-48b0-8b61-835146df28c1" containerID="4c4da425137942b26402e87d800a133130761ddfde8b5ad1911ac803f8d0758d" exitCode=0 Oct 11 05:01:17 crc kubenswrapper[4651]: I1011 05:01:17.079237 4651 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" event={"ID":"519a1ae1-e964-48b0-8b61-835146df28c1","Type":"ContainerDied","Data":"4c4da425137942b26402e87d800a133130761ddfde8b5ad1911ac803f8d0758d"} Oct 11 05:01:17 crc kubenswrapper[4651]: I1011 05:01:17.079531 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" event={"ID":"519a1ae1-e964-48b0-8b61-835146df28c1","Type":"ContainerStarted","Data":"f3d31fd3172b3e1939d18cb8fc4eb85b3b6d1b1c4f71fa7a9aed3462d80c8443"} Oct 11 05:01:17 crc kubenswrapper[4651]: I1011 05:01:17.079556 4651 scope.go:117] "RemoveContainer" containerID="5ba3105ff1f646ac3c317b8777235fea6078905260c012507f606af9534c8bd2" Oct 11 05:01:19 crc kubenswrapper[4651]: I1011 05:01:19.099599 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" event={"ID":"cdf78a22-d9f2-473a-9309-3863ff7eba70","Type":"ContainerStarted","Data":"681420a4aa86f7f6fcda52f2a5d5c35331ec82a7137435bd1d3c3c92fe51fe1b"} Oct 11 05:01:19 crc kubenswrapper[4651]: I1011 05:01:19.998497 4651 scope.go:117] "RemoveContainer" containerID="68954082d4369b9b4d95ba1f15e19593dc330b698562be413055b2ba012b5f8c" Oct 11 05:01:20 crc kubenswrapper[4651]: I1011 05:01:20.021686 4651 scope.go:117] "RemoveContainer" containerID="ff59b524a1c2589496d6c2c71a6ce2ad3b7b309b0e6e25f0869d10c12a5a877e" Oct 11 05:01:20 crc kubenswrapper[4651]: I1011 05:01:20.048010 4651 scope.go:117] "RemoveContainer" containerID="add3ed68e0bf59bc569248a3660cfc9a20995641ec65b0bf7133e47c366aad73" Oct 11 05:01:20 crc kubenswrapper[4651]: I1011 05:01:20.076226 4651 scope.go:117] "RemoveContainer" containerID="788d6a46d740de869fe7cd1d28479f8115a3515c7bd2482e06cbc4dba7b27fac" Oct 11 05:01:20 crc kubenswrapper[4651]: I1011 05:01:20.100355 4651 scope.go:117] "RemoveContainer" containerID="ccd6180f4c5665d62ee9458795318ef2474c1938d19e6cbfec9c2174fe10e020" Oct 11 05:01:20 crc kubenswrapper[4651]: I1011 05:01:20.112624 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6zt9s_28e01c08-a461-4f44-a49c-4bf92fd3a2ce/ovn-acl-logging/0.log" Oct 11 05:01:20 crc kubenswrapper[4651]: I1011 05:01:20.113285 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6zt9s_28e01c08-a461-4f44-a49c-4bf92fd3a2ce/ovn-controller/0.log" Oct 11 05:01:20 crc kubenswrapper[4651]: I1011 05:01:20.125421 4651 scope.go:117] "RemoveContainer" containerID="3eed8335754437c935f07223bf4c4f40f9ab0bbdc9a86b7610f7f6fd55140e37" Oct 11 05:01:20 crc kubenswrapper[4651]: I1011 05:01:20.159231 4651 scope.go:117] "RemoveContainer" containerID="0fd0aaae2760a1c934aed54b8e07528f0f78db1149c9e1f8674bec90b61a7078" Oct 11 05:01:20 crc kubenswrapper[4651]: I1011 05:01:20.222762 4651 scope.go:117] "RemoveContainer" containerID="23b3fde15e637081cbf5fad2e55ccea1f0fe63d39ff5f82a60ce05ffeef9a837" Oct 11 05:01:20 crc kubenswrapper[4651]: I1011 05:01:20.239307 4651 scope.go:117] "RemoveContainer" containerID="99f927067aabe4690e99fe50a069afdf68b22d950a8ae141235571ced468c44e" Oct 11 05:01:21 crc kubenswrapper[4651]: I1011 05:01:21.128870 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" event={"ID":"cdf78a22-d9f2-473a-9309-3863ff7eba70","Type":"ContainerStarted","Data":"13fbd7399cc6a66caa5ae78080566bfe3b6ec610ca6b29925c40b67f6837998d"} Oct 11 05:01:21 crc kubenswrapper[4651]: I1011 05:01:21.129426 4651 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:21 crc kubenswrapper[4651]: I1011 05:01:21.129457 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:21 crc kubenswrapper[4651]: I1011 05:01:21.129477 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:21 crc kubenswrapper[4651]: I1011 05:01:21.174040 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:21 crc kubenswrapper[4651]: I1011 05:01:21.176645 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" podStartSLOduration=7.176616797 podStartE2EDuration="7.176616797s" podCreationTimestamp="2025-10-11 05:01:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:01:21.16991253 +0000 UTC m=+602.066145426" watchObservedRunningTime="2025-10-11 05:01:21.176616797 +0000 UTC m=+602.072849643" Oct 11 05:01:21 crc kubenswrapper[4651]: I1011 05:01:21.187159 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:26 crc kubenswrapper[4651]: I1011 05:01:26.869286 4651 scope.go:117] "RemoveContainer" containerID="593f6c9a7505c4ee7c5c917e6b485fb997517f68d952455303c896adae9f2391" Oct 11 05:01:26 crc kubenswrapper[4651]: E1011 05:01:26.870196 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-wz4hw_openshift-multus(fbfdd781-994b-49b4-9c8e-edc0ea4145d1)\"" pod="openshift-multus/multus-wz4hw" podUID="fbfdd781-994b-49b4-9c8e-edc0ea4145d1" Oct 11 05:01:38 crc kubenswrapper[4651]: I1011 05:01:38.870402 4651 scope.go:117] "RemoveContainer" containerID="593f6c9a7505c4ee7c5c917e6b485fb997517f68d952455303c896adae9f2391" Oct 11 05:01:39 crc kubenswrapper[4651]: I1011 05:01:39.251720 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wz4hw_fbfdd781-994b-49b4-9c8e-edc0ea4145d1/kube-multus/2.log" Oct 11 05:01:39 crc kubenswrapper[4651]: I1011 05:01:39.252236 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wz4hw" event={"ID":"fbfdd781-994b-49b4-9c8e-edc0ea4145d1","Type":"ContainerStarted","Data":"559529ef922aa4fe9894a9194418f270962b94b2193ec119da9f38f688b72cf4"} Oct 11 05:01:44 crc kubenswrapper[4651]: I1011 05:01:44.460510 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-xt42g" Oct 11 05:01:53 crc kubenswrapper[4651]: I1011 05:01:53.650261 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm"] Oct 11 05:01:53 crc kubenswrapper[4651]: I1011 05:01:53.652265 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm" Oct 11 05:01:53 crc kubenswrapper[4651]: I1011 05:01:53.654527 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 11 05:01:53 crc kubenswrapper[4651]: I1011 05:01:53.666105 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm"] Oct 11 05:01:53 crc kubenswrapper[4651]: I1011 05:01:53.733658 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvp46\" (UniqueName: \"kubernetes.io/projected/f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c-kube-api-access-pvp46\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm\" (UID: \"f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm" Oct 11 05:01:53 crc kubenswrapper[4651]: I1011 05:01:53.733718 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm\" (UID: \"f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm" Oct 11 05:01:53 crc kubenswrapper[4651]: I1011 05:01:53.733768 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm\" (UID: \"f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm" Oct 11 05:01:53 crc kubenswrapper[4651]: I1011 05:01:53.835367 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvp46\" (UniqueName: \"kubernetes.io/projected/f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c-kube-api-access-pvp46\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm\" (UID: \"f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm" Oct 11 05:01:53 crc kubenswrapper[4651]: I1011 05:01:53.835426 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm\" (UID: \"f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm" Oct 11 05:01:53 crc kubenswrapper[4651]: I1011 05:01:53.835463 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm\" (UID: \"f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm" Oct 11 05:01:53 crc kubenswrapper[4651]: I1011 05:01:53.835868 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm\" (UID: \"f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm" Oct 11 05:01:53 crc kubenswrapper[4651]: I1011 05:01:53.836067 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm\" (UID: \"f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm" Oct 11 05:01:53 crc kubenswrapper[4651]: I1011 05:01:53.855214 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvp46\" (UniqueName: \"kubernetes.io/projected/f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c-kube-api-access-pvp46\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm\" (UID: \"f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm" Oct 11 05:01:53 crc kubenswrapper[4651]: I1011 05:01:53.983131 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm" Oct 11 05:01:54 crc kubenswrapper[4651]: I1011 05:01:54.418254 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm"] Oct 11 05:01:55 crc kubenswrapper[4651]: I1011 05:01:55.342242 4651 generic.go:334] "Generic (PLEG): container finished" podID="f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c" containerID="f6ec0938870122f90c42c547a0935736833e340cbc27857dee51d210f105ccfc" exitCode=0 Oct 11 05:01:55 crc kubenswrapper[4651]: I1011 05:01:55.342310 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm" event={"ID":"f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c","Type":"ContainerDied","Data":"f6ec0938870122f90c42c547a0935736833e340cbc27857dee51d210f105ccfc"} Oct 11 05:01:55 crc kubenswrapper[4651]: I1011 05:01:55.342624 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm" event={"ID":"f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c","Type":"ContainerStarted","Data":"35f37c3748b37582bd3a6155b1064149a59c7ebe231a31a86d75ae2e21d05838"} Oct 11 05:01:57 crc kubenswrapper[4651]: I1011 05:01:57.364288 4651 generic.go:334] "Generic (PLEG): container finished" podID="f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c" containerID="0b980f06c1f458da5de1c8e1804f02690b03dab009535abcc73d578febc1897b" exitCode=0 Oct 11 05:01:57 crc kubenswrapper[4651]: I1011 05:01:57.364323 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm" event={"ID":"f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c","Type":"ContainerDied","Data":"0b980f06c1f458da5de1c8e1804f02690b03dab009535abcc73d578febc1897b"} Oct 11 05:01:57 crc kubenswrapper[4651]: E1011 05:01:57.721162 4651 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b8564c_1ee2_4b5b_8d7a_aa95cda9486c.slice/crio-conmon-11e9df42ba617a8ba3029b9af24383be776762a2869aae27f163ab74fcb74a69.scope\": RecentStats: unable to find data in memory cache]" Oct 11 05:01:58 crc kubenswrapper[4651]: I1011 05:01:58.383223 4651 generic.go:334] "Generic (PLEG): container finished" podID="f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c" containerID="11e9df42ba617a8ba3029b9af24383be776762a2869aae27f163ab74fcb74a69" exitCode=0 Oct 11 05:01:58 crc kubenswrapper[4651]: I1011 05:01:58.383278 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm" event={"ID":"f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c","Type":"ContainerDied","Data":"11e9df42ba617a8ba3029b9af24383be776762a2869aae27f163ab74fcb74a69"} Oct 11 05:01:59 crc kubenswrapper[4651]: I1011 05:01:59.708941 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm" Oct 11 05:01:59 crc kubenswrapper[4651]: I1011 05:01:59.820315 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c-bundle\") pod \"f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c\" (UID: \"f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c\") " Oct 11 05:01:59 crc kubenswrapper[4651]: I1011 05:01:59.820410 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c-util\") pod \"f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c\" (UID: \"f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c\") " Oct 11 05:01:59 crc kubenswrapper[4651]: I1011 05:01:59.820476 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pvp46\" (UniqueName: \"kubernetes.io/projected/f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c-kube-api-access-pvp46\") pod \"f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c\" (UID: \"f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c\") " Oct 11 05:01:59 crc kubenswrapper[4651]: I1011 05:01:59.821997 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c-bundle" (OuterVolumeSpecName: "bundle") pod "f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c" (UID: "f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:01:59 crc kubenswrapper[4651]: I1011 05:01:59.828466 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c-kube-api-access-pvp46" (OuterVolumeSpecName: "kube-api-access-pvp46") pod "f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c" (UID: "f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c"). InnerVolumeSpecName "kube-api-access-pvp46". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:01:59 crc kubenswrapper[4651]: I1011 05:01:59.844779 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c-util" (OuterVolumeSpecName: "util") pod "f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c" (UID: "f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:01:59 crc kubenswrapper[4651]: I1011 05:01:59.922226 4651 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:01:59 crc kubenswrapper[4651]: I1011 05:01:59.922284 4651 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c-util\") on node \"crc\" DevicePath \"\"" Oct 11 05:01:59 crc kubenswrapper[4651]: I1011 05:01:59.922298 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pvp46\" (UniqueName: \"kubernetes.io/projected/f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c-kube-api-access-pvp46\") on node \"crc\" DevicePath \"\"" Oct 11 05:02:00 crc kubenswrapper[4651]: I1011 05:02:00.395516 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm" event={"ID":"f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c","Type":"ContainerDied","Data":"35f37c3748b37582bd3a6155b1064149a59c7ebe231a31a86d75ae2e21d05838"} Oct 11 05:02:00 crc kubenswrapper[4651]: I1011 05:02:00.395748 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="35f37c3748b37582bd3a6155b1064149a59c7ebe231a31a86d75ae2e21d05838" Oct 11 05:02:00 crc kubenswrapper[4651]: I1011 05:02:00.395805 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm" Oct 11 05:02:05 crc kubenswrapper[4651]: I1011 05:02:05.203885 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-p95g9"] Oct 11 05:02:05 crc kubenswrapper[4651]: E1011 05:02:05.204492 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c" containerName="pull" Oct 11 05:02:05 crc kubenswrapper[4651]: I1011 05:02:05.204513 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c" containerName="pull" Oct 11 05:02:05 crc kubenswrapper[4651]: E1011 05:02:05.204544 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c" containerName="util" Oct 11 05:02:05 crc kubenswrapper[4651]: I1011 05:02:05.204557 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c" containerName="util" Oct 11 05:02:05 crc kubenswrapper[4651]: E1011 05:02:05.204585 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c" containerName="extract" Oct 11 05:02:05 crc kubenswrapper[4651]: I1011 05:02:05.204600 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c" containerName="extract" Oct 11 05:02:05 crc kubenswrapper[4651]: I1011 05:02:05.204777 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c" containerName="extract" Oct 11 05:02:05 crc kubenswrapper[4651]: I1011 05:02:05.205404 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-p95g9" Oct 11 05:02:05 crc kubenswrapper[4651]: I1011 05:02:05.207479 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Oct 11 05:02:05 crc kubenswrapper[4651]: I1011 05:02:05.207514 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Oct 11 05:02:05 crc kubenswrapper[4651]: I1011 05:02:05.209172 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-js2fb" Oct 11 05:02:05 crc kubenswrapper[4651]: I1011 05:02:05.220650 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-p95g9"] Oct 11 05:02:05 crc kubenswrapper[4651]: I1011 05:02:05.283565 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f48g5\" (UniqueName: \"kubernetes.io/projected/c7c3404e-ed5a-48d9-b525-7451514c9a5c-kube-api-access-f48g5\") pod \"nmstate-operator-858ddd8f98-p95g9\" (UID: \"c7c3404e-ed5a-48d9-b525-7451514c9a5c\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-p95g9" Oct 11 05:02:05 crc kubenswrapper[4651]: I1011 05:02:05.384651 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f48g5\" (UniqueName: \"kubernetes.io/projected/c7c3404e-ed5a-48d9-b525-7451514c9a5c-kube-api-access-f48g5\") pod \"nmstate-operator-858ddd8f98-p95g9\" (UID: \"c7c3404e-ed5a-48d9-b525-7451514c9a5c\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-p95g9" Oct 11 05:02:05 crc kubenswrapper[4651]: I1011 05:02:05.402956 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f48g5\" (UniqueName: \"kubernetes.io/projected/c7c3404e-ed5a-48d9-b525-7451514c9a5c-kube-api-access-f48g5\") pod \"nmstate-operator-858ddd8f98-p95g9\" (UID: \"c7c3404e-ed5a-48d9-b525-7451514c9a5c\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-p95g9" Oct 11 05:02:05 crc kubenswrapper[4651]: I1011 05:02:05.523934 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-p95g9" Oct 11 05:02:05 crc kubenswrapper[4651]: I1011 05:02:05.992792 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-p95g9"] Oct 11 05:02:06 crc kubenswrapper[4651]: I1011 05:02:06.427569 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-p95g9" event={"ID":"c7c3404e-ed5a-48d9-b525-7451514c9a5c","Type":"ContainerStarted","Data":"5c029c2fe7cffff03724b4596c7343ae659b92722504ec38409942b0bb47b98d"} Oct 11 05:02:08 crc kubenswrapper[4651]: I1011 05:02:08.439761 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-p95g9" event={"ID":"c7c3404e-ed5a-48d9-b525-7451514c9a5c","Type":"ContainerStarted","Data":"1b3cb417898caad7963c7a4b0409283b23587430b42cf7545b1176a92064343f"} Oct 11 05:02:08 crc kubenswrapper[4651]: I1011 05:02:08.461130 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-858ddd8f98-p95g9" podStartSLOduration=1.562514331 podStartE2EDuration="3.461115861s" podCreationTimestamp="2025-10-11 05:02:05 +0000 UTC" firstStartedPulling="2025-10-11 05:02:06.000832377 +0000 UTC m=+646.897065173" lastFinishedPulling="2025-10-11 05:02:07.899433907 +0000 UTC m=+648.795666703" observedRunningTime="2025-10-11 05:02:08.45501863 +0000 UTC m=+649.351251446" watchObservedRunningTime="2025-10-11 05:02:08.461115861 +0000 UTC m=+649.357348657" Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.286725 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-gg29l"] Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.288793 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-gg29l" Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.291016 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-gs8h9" Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.297026 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-dlvdg"] Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.297918 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-dlvdg" Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.299573 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.302512 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-gg29l"] Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.318139 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-qpgc7"] Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.319058 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-qpgc7" Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.333145 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-dlvdg"] Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.397860 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2glg\" (UniqueName: \"kubernetes.io/projected/85293b68-d608-466d-9aa5-2b5eae8edc74-kube-api-access-r2glg\") pod \"nmstate-webhook-6cdbc54649-dlvdg\" (UID: \"85293b68-d608-466d-9aa5-2b5eae8edc74\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-dlvdg" Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.397927 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cmc5c\" (UniqueName: \"kubernetes.io/projected/298ed4c9-1190-4617-a3c7-147f15e1fea3-kube-api-access-cmc5c\") pod \"nmstate-metrics-fdff9cb8d-gg29l\" (UID: \"298ed4c9-1190-4617-a3c7-147f15e1fea3\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-gg29l" Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.398008 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/50f6467a-ad8d-4828-81ba-b944dccc4be7-dbus-socket\") pod \"nmstate-handler-qpgc7\" (UID: \"50f6467a-ad8d-4828-81ba-b944dccc4be7\") " pod="openshift-nmstate/nmstate-handler-qpgc7" Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.398054 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/50f6467a-ad8d-4828-81ba-b944dccc4be7-nmstate-lock\") pod \"nmstate-handler-qpgc7\" (UID: \"50f6467a-ad8d-4828-81ba-b944dccc4be7\") " pod="openshift-nmstate/nmstate-handler-qpgc7" Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.398180 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/85293b68-d608-466d-9aa5-2b5eae8edc74-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-dlvdg\" (UID: \"85293b68-d608-466d-9aa5-2b5eae8edc74\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-dlvdg" Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.398226 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rj6n2\" (UniqueName: \"kubernetes.io/projected/50f6467a-ad8d-4828-81ba-b944dccc4be7-kube-api-access-rj6n2\") pod \"nmstate-handler-qpgc7\" (UID: \"50f6467a-ad8d-4828-81ba-b944dccc4be7\") " pod="openshift-nmstate/nmstate-handler-qpgc7" Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.398254 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/50f6467a-ad8d-4828-81ba-b944dccc4be7-ovs-socket\") pod \"nmstate-handler-qpgc7\" (UID: \"50f6467a-ad8d-4828-81ba-b944dccc4be7\") " pod="openshift-nmstate/nmstate-handler-qpgc7" Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.423809 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-zwhxz"] Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.424595 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-zwhxz"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.430120 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.431471 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-927md"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.444093 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.458787 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-zwhxz"]
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.504425 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/50f6467a-ad8d-4828-81ba-b944dccc4be7-nmstate-lock\") pod \"nmstate-handler-qpgc7\" (UID: \"50f6467a-ad8d-4828-81ba-b944dccc4be7\") " pod="openshift-nmstate/nmstate-handler-qpgc7"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.504477 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/85293b68-d608-466d-9aa5-2b5eae8edc74-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-dlvdg\" (UID: \"85293b68-d608-466d-9aa5-2b5eae8edc74\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-dlvdg"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.504494 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rj6n2\" (UniqueName: \"kubernetes.io/projected/50f6467a-ad8d-4828-81ba-b944dccc4be7-kube-api-access-rj6n2\") pod \"nmstate-handler-qpgc7\" (UID: \"50f6467a-ad8d-4828-81ba-b944dccc4be7\") " pod="openshift-nmstate/nmstate-handler-qpgc7"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.504511 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/50f6467a-ad8d-4828-81ba-b944dccc4be7-ovs-socket\") pod \"nmstate-handler-qpgc7\" (UID: \"50f6467a-ad8d-4828-81ba-b944dccc4be7\") " pod="openshift-nmstate/nmstate-handler-qpgc7"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.504546 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/03a76fa3-7b5c-4b1c-9c67-d54f0e448c2d-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-zwhxz\" (UID: \"03a76fa3-7b5c-4b1c-9c67-d54f0e448c2d\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-zwhxz"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.504574 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/03a76fa3-7b5c-4b1c-9c67-d54f0e448c2d-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-zwhxz\" (UID: \"03a76fa3-7b5c-4b1c-9c67-d54f0e448c2d\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-zwhxz"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.504608 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2glg\" (UniqueName: \"kubernetes.io/projected/85293b68-d608-466d-9aa5-2b5eae8edc74-kube-api-access-r2glg\") pod \"nmstate-webhook-6cdbc54649-dlvdg\" (UID: \"85293b68-d608-466d-9aa5-2b5eae8edc74\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-dlvdg"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.504631 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cmc5c\" (UniqueName: \"kubernetes.io/projected/298ed4c9-1190-4617-a3c7-147f15e1fea3-kube-api-access-cmc5c\") pod \"nmstate-metrics-fdff9cb8d-gg29l\" (UID: \"298ed4c9-1190-4617-a3c7-147f15e1fea3\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-gg29l"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.504653 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7p92\" (UniqueName: \"kubernetes.io/projected/03a76fa3-7b5c-4b1c-9c67-d54f0e448c2d-kube-api-access-x7p92\") pod \"nmstate-console-plugin-6b874cbd85-zwhxz\" (UID: \"03a76fa3-7b5c-4b1c-9c67-d54f0e448c2d\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-zwhxz"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.504670 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/50f6467a-ad8d-4828-81ba-b944dccc4be7-dbus-socket\") pod \"nmstate-handler-qpgc7\" (UID: \"50f6467a-ad8d-4828-81ba-b944dccc4be7\") " pod="openshift-nmstate/nmstate-handler-qpgc7"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.504951 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/50f6467a-ad8d-4828-81ba-b944dccc4be7-dbus-socket\") pod \"nmstate-handler-qpgc7\" (UID: \"50f6467a-ad8d-4828-81ba-b944dccc4be7\") " pod="openshift-nmstate/nmstate-handler-qpgc7"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.504983 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/50f6467a-ad8d-4828-81ba-b944dccc4be7-ovs-socket\") pod \"nmstate-handler-qpgc7\" (UID: \"50f6467a-ad8d-4828-81ba-b944dccc4be7\") " pod="openshift-nmstate/nmstate-handler-qpgc7"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.504987 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/50f6467a-ad8d-4828-81ba-b944dccc4be7-nmstate-lock\") pod \"nmstate-handler-qpgc7\" (UID: \"50f6467a-ad8d-4828-81ba-b944dccc4be7\") " pod="openshift-nmstate/nmstate-handler-qpgc7"
Oct 11 05:02:14 crc kubenswrapper[4651]: E1011 05:02:14.505062 4651 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found
Oct 11 05:02:14 crc kubenswrapper[4651]: E1011 05:02:14.505110 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/85293b68-d608-466d-9aa5-2b5eae8edc74-tls-key-pair podName:85293b68-d608-466d-9aa5-2b5eae8edc74 nodeName:}" failed. No retries permitted until 2025-10-11 05:02:15.005092556 +0000 UTC m=+655.901325352 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/85293b68-d608-466d-9aa5-2b5eae8edc74-tls-key-pair") pod "nmstate-webhook-6cdbc54649-dlvdg" (UID: "85293b68-d608-466d-9aa5-2b5eae8edc74") : secret "openshift-nmstate-webhook" not found
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.531901 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cmc5c\" (UniqueName: \"kubernetes.io/projected/298ed4c9-1190-4617-a3c7-147f15e1fea3-kube-api-access-cmc5c\") pod \"nmstate-metrics-fdff9cb8d-gg29l\" (UID: \"298ed4c9-1190-4617-a3c7-147f15e1fea3\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-gg29l"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.531931 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2glg\" (UniqueName: \"kubernetes.io/projected/85293b68-d608-466d-9aa5-2b5eae8edc74-kube-api-access-r2glg\") pod \"nmstate-webhook-6cdbc54649-dlvdg\" (UID: \"85293b68-d608-466d-9aa5-2b5eae8edc74\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-dlvdg"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.542297 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rj6n2\" (UniqueName: \"kubernetes.io/projected/50f6467a-ad8d-4828-81ba-b944dccc4be7-kube-api-access-rj6n2\") pod \"nmstate-handler-qpgc7\" (UID: \"50f6467a-ad8d-4828-81ba-b944dccc4be7\") " pod="openshift-nmstate/nmstate-handler-qpgc7"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.605643 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/03a76fa3-7b5c-4b1c-9c67-d54f0e448c2d-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-zwhxz\" (UID: \"03a76fa3-7b5c-4b1c-9c67-d54f0e448c2d\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-zwhxz"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.606242 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7p92\" (UniqueName: \"kubernetes.io/projected/03a76fa3-7b5c-4b1c-9c67-d54f0e448c2d-kube-api-access-x7p92\") pod \"nmstate-console-plugin-6b874cbd85-zwhxz\" (UID: \"03a76fa3-7b5c-4b1c-9c67-d54f0e448c2d\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-zwhxz"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.606380 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/03a76fa3-7b5c-4b1c-9c67-d54f0e448c2d-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-zwhxz\" (UID: \"03a76fa3-7b5c-4b1c-9c67-d54f0e448c2d\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-zwhxz"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.606839 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/03a76fa3-7b5c-4b1c-9c67-d54f0e448c2d-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-zwhxz\" (UID: \"03a76fa3-7b5c-4b1c-9c67-d54f0e448c2d\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-zwhxz"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.608728 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-gg29l"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.609068 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/03a76fa3-7b5c-4b1c-9c67-d54f0e448c2d-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-zwhxz\" (UID: \"03a76fa3-7b5c-4b1c-9c67-d54f0e448c2d\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-zwhxz"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.622125 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7p92\" (UniqueName: \"kubernetes.io/projected/03a76fa3-7b5c-4b1c-9c67-d54f0e448c2d-kube-api-access-x7p92\") pod \"nmstate-console-plugin-6b874cbd85-zwhxz\" (UID: \"03a76fa3-7b5c-4b1c-9c67-d54f0e448c2d\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-zwhxz"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.637812 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-qpgc7"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.667856 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-68bf86dbc-bk66w"]
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.668679 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-68bf86dbc-bk66w"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.686014 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-68bf86dbc-bk66w"]
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.707785 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d-console-config\") pod \"console-68bf86dbc-bk66w\" (UID: \"f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d\") " pod="openshift-console/console-68bf86dbc-bk66w"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.707855 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dm69l\" (UniqueName: \"kubernetes.io/projected/f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d-kube-api-access-dm69l\") pod \"console-68bf86dbc-bk66w\" (UID: \"f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d\") " pod="openshift-console/console-68bf86dbc-bk66w"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.708101 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d-console-serving-cert\") pod \"console-68bf86dbc-bk66w\" (UID: \"f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d\") " pod="openshift-console/console-68bf86dbc-bk66w"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.708126 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d-oauth-serving-cert\") pod \"console-68bf86dbc-bk66w\" (UID: \"f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d\") " pod="openshift-console/console-68bf86dbc-bk66w"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.708268 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d-trusted-ca-bundle\") pod \"console-68bf86dbc-bk66w\" (UID: \"f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d\") " pod="openshift-console/console-68bf86dbc-bk66w"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.708316 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d-service-ca\") pod \"console-68bf86dbc-bk66w\" (UID: \"f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d\") " pod="openshift-console/console-68bf86dbc-bk66w"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.708410 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d-console-oauth-config\") pod \"console-68bf86dbc-bk66w\" (UID: \"f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d\") " pod="openshift-console/console-68bf86dbc-bk66w"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.758284 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-zwhxz"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.809905 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d-console-config\") pod \"console-68bf86dbc-bk66w\" (UID: \"f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d\") " pod="openshift-console/console-68bf86dbc-bk66w"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.809954 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dm69l\" (UniqueName: \"kubernetes.io/projected/f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d-kube-api-access-dm69l\") pod \"console-68bf86dbc-bk66w\" (UID: \"f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d\") " pod="openshift-console/console-68bf86dbc-bk66w"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.809991 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d-console-serving-cert\") pod \"console-68bf86dbc-bk66w\" (UID: \"f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d\") " pod="openshift-console/console-68bf86dbc-bk66w"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.811486 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d-console-config\") pod \"console-68bf86dbc-bk66w\" (UID: \"f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d\") " pod="openshift-console/console-68bf86dbc-bk66w"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.811563 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d-oauth-serving-cert\") pod \"console-68bf86dbc-bk66w\" (UID: \"f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d\") " pod="openshift-console/console-68bf86dbc-bk66w"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.811636 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d-trusted-ca-bundle\") pod \"console-68bf86dbc-bk66w\" (UID: \"f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d\") " pod="openshift-console/console-68bf86dbc-bk66w"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.811858 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d-service-ca\") pod \"console-68bf86dbc-bk66w\" (UID: \"f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d\") " pod="openshift-console/console-68bf86dbc-bk66w"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.811977 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d-console-oauth-config\") pod \"console-68bf86dbc-bk66w\" (UID: \"f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d\") " pod="openshift-console/console-68bf86dbc-bk66w"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.812853 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d-oauth-serving-cert\") pod \"console-68bf86dbc-bk66w\" (UID: \"f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d\") " pod="openshift-console/console-68bf86dbc-bk66w"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.813458 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d-service-ca\") pod \"console-68bf86dbc-bk66w\" (UID: \"f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d\") " pod="openshift-console/console-68bf86dbc-bk66w"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.813537 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-gg29l"]
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.813770 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d-trusted-ca-bundle\") pod \"console-68bf86dbc-bk66w\" (UID: \"f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d\") " pod="openshift-console/console-68bf86dbc-bk66w"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.816167 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d-console-oauth-config\") pod \"console-68bf86dbc-bk66w\" (UID: \"f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d\") " pod="openshift-console/console-68bf86dbc-bk66w"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.818762 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d-console-serving-cert\") pod \"console-68bf86dbc-bk66w\" (UID: \"f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d\") " pod="openshift-console/console-68bf86dbc-bk66w"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.832723 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dm69l\" (UniqueName: \"kubernetes.io/projected/f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d-kube-api-access-dm69l\") pod \"console-68bf86dbc-bk66w\" (UID: \"f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d\") " pod="openshift-console/console-68bf86dbc-bk66w"
Oct 11 05:02:14 crc kubenswrapper[4651]: I1011 05:02:14.986557 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-68bf86dbc-bk66w"
Oct 11 05:02:15 crc kubenswrapper[4651]: I1011 05:02:15.014878 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/85293b68-d608-466d-9aa5-2b5eae8edc74-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-dlvdg\" (UID: \"85293b68-d608-466d-9aa5-2b5eae8edc74\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-dlvdg"
Oct 11 05:02:15 crc kubenswrapper[4651]: I1011 05:02:15.019198 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/85293b68-d608-466d-9aa5-2b5eae8edc74-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-dlvdg\" (UID: \"85293b68-d608-466d-9aa5-2b5eae8edc74\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-dlvdg"
Oct 11 05:02:15 crc kubenswrapper[4651]: I1011 05:02:15.165933 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-zwhxz"]
Oct 11 05:02:15 crc kubenswrapper[4651]: I1011 05:02:15.226120 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-dlvdg"
Oct 11 05:02:15 crc kubenswrapper[4651]: I1011 05:02:15.241378 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-68bf86dbc-bk66w"]
Oct 11 05:02:15 crc kubenswrapper[4651]: I1011 05:02:15.505450 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-zwhxz" event={"ID":"03a76fa3-7b5c-4b1c-9c67-d54f0e448c2d","Type":"ContainerStarted","Data":"60f80be0579e5b7b18a0a6d61ee85c36634b9f1eb48a74d510254b67f2cac075"}
Oct 11 05:02:15 crc kubenswrapper[4651]: I1011 05:02:15.507300 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-gg29l" event={"ID":"298ed4c9-1190-4617-a3c7-147f15e1fea3","Type":"ContainerStarted","Data":"488cb9e89bb94b551007ffbdeba3677c7794b1057c1292193597e71317de2e6b"}
Oct 11 05:02:15 crc kubenswrapper[4651]: I1011 05:02:15.508552 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-qpgc7" event={"ID":"50f6467a-ad8d-4828-81ba-b944dccc4be7","Type":"ContainerStarted","Data":"44fee342eb4075f47e8f2e8ac0d10b12c6656e14e3007d4376b6336ebb3988fc"}
Oct 11 05:02:15 crc kubenswrapper[4651]: I1011 05:02:15.510007 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-68bf86dbc-bk66w" event={"ID":"f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d","Type":"ContainerStarted","Data":"03bcdc12fb996784021467fbdd7a2da6314c80745948d69d38d9c1843e49fd5d"}
Oct 11 05:02:15 crc kubenswrapper[4651]: I1011 05:02:15.510063 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-68bf86dbc-bk66w" event={"ID":"f8b0b76d-6dd4-4f91-aaf4-c0cdcaa9308d","Type":"ContainerStarted","Data":"89eaf3bcc4f7ac5471cf3a60b838c388f90f5840664f66e7764a248959f28fd1"}
Oct 11 05:02:15 crc kubenswrapper[4651]: I1011 05:02:15.527934 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-68bf86dbc-bk66w" podStartSLOduration=1.52790492 podStartE2EDuration="1.52790492s" podCreationTimestamp="2025-10-11 05:02:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:02:15.525207581 +0000 UTC m=+656.421440397" watchObservedRunningTime="2025-10-11 05:02:15.52790492 +0000 UTC m=+656.424137716"
Oct 11 05:02:15 crc kubenswrapper[4651]: I1011 05:02:15.617108 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-dlvdg"]
Oct 11 05:02:15 crc kubenswrapper[4651]: W1011 05:02:15.624532 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod85293b68_d608_466d_9aa5_2b5eae8edc74.slice/crio-c8b7d951d966cdc028cbf57b15cd561ea57a0bb5b0d8d7041160cfe845ddb7c0 WatchSource:0}: Error finding container c8b7d951d966cdc028cbf57b15cd561ea57a0bb5b0d8d7041160cfe845ddb7c0: Status 404 returned error can't find the container with id c8b7d951d966cdc028cbf57b15cd561ea57a0bb5b0d8d7041160cfe845ddb7c0
Oct 11 05:02:16 crc kubenswrapper[4651]: I1011 05:02:16.516762 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-dlvdg" event={"ID":"85293b68-d608-466d-9aa5-2b5eae8edc74","Type":"ContainerStarted","Data":"c8b7d951d966cdc028cbf57b15cd561ea57a0bb5b0d8d7041160cfe845ddb7c0"}
Oct 11 05:02:17 crc kubenswrapper[4651]: I1011 05:02:17.526267 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-dlvdg" event={"ID":"85293b68-d608-466d-9aa5-2b5eae8edc74","Type":"ContainerStarted","Data":"12ebcd15f0443170eb78ba6ae23edbaf525561fb019859e6155a29d8d872bc96"}
Oct 11 05:02:17 crc kubenswrapper[4651]: I1011 05:02:17.526504 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-dlvdg"
Oct 11 05:02:17 crc kubenswrapper[4651]: I1011 05:02:17.527567 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-qpgc7" event={"ID":"50f6467a-ad8d-4828-81ba-b944dccc4be7","Type":"ContainerStarted","Data":"e817e70fb05cfbb4fd04b6a1788513889976bba94235657f29d58a3dc2843e66"}
Oct 11 05:02:17 crc kubenswrapper[4651]: I1011 05:02:17.528206 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-qpgc7"
Oct 11 05:02:17 crc kubenswrapper[4651]: I1011 05:02:17.529710 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-gg29l" event={"ID":"298ed4c9-1190-4617-a3c7-147f15e1fea3","Type":"ContainerStarted","Data":"1b5d3dd556908c0c459de93d8218d308873646bad5fc71d1c815be3805cde1f9"}
Oct 11 05:02:17 crc kubenswrapper[4651]: I1011 05:02:17.541705 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-dlvdg" podStartSLOduration=1.985799536 podStartE2EDuration="3.541684496s" podCreationTimestamp="2025-10-11 05:02:14 +0000 UTC" firstStartedPulling="2025-10-11 05:02:15.626778912 +0000 UTC m=+656.523011708" lastFinishedPulling="2025-10-11 05:02:17.182663872 +0000 UTC m=+658.078896668" observedRunningTime="2025-10-11 05:02:17.539377087 +0000 UTC m=+658.435609893" watchObservedRunningTime="2025-10-11 05:02:17.541684496 +0000 UTC m=+658.437917292"
Oct 11 05:02:17 crc kubenswrapper[4651]: I1011 05:02:17.569623 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-qpgc7" podStartSLOduration=1.048014544 podStartE2EDuration="3.569603446s" podCreationTimestamp="2025-10-11 05:02:14 +0000 UTC" firstStartedPulling="2025-10-11 05:02:14.660085955 +0000 UTC m=+655.556318751" lastFinishedPulling="2025-10-11 05:02:17.181674857 +0000 UTC m=+658.077907653" observedRunningTime="2025-10-11 05:02:17.568301252 +0000 UTC m=+658.464534068" watchObservedRunningTime="2025-10-11 05:02:17.569603446 +0000 UTC m=+658.465836242"
Oct 11 05:02:18 crc kubenswrapper[4651]: I1011 05:02:18.536990 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-zwhxz" event={"ID":"03a76fa3-7b5c-4b1c-9c67-d54f0e448c2d","Type":"ContainerStarted","Data":"4fa104431aa7d9266c145df5964c6dfe74e1344f07a54ba6d78cc0814fe735fb"}
Oct 11 05:02:19 crc kubenswrapper[4651]: I1011 05:02:19.547021 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-gg29l" event={"ID":"298ed4c9-1190-4617-a3c7-147f15e1fea3","Type":"ContainerStarted","Data":"84149fafcc4d88112133f5c2f4f5d90a2056ece8d464edba63ff3a1778f2b3d2"}
Oct 11 05:02:19 crc kubenswrapper[4651]: I1011 05:02:19.569595 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-gg29l" podStartSLOduration=1.118190367 podStartE2EDuration="5.569576152s" podCreationTimestamp="2025-10-11 05:02:14 +0000 UTC" firstStartedPulling="2025-10-11 05:02:14.82398126 +0000 UTC m=+655.720214056" lastFinishedPulling="2025-10-11 05:02:19.275367045 +0000 UTC m=+660.171599841" observedRunningTime="2025-10-11 05:02:19.568779212 +0000 UTC m=+660.465012028" watchObservedRunningTime="2025-10-11 05:02:19.569576152 +0000 UTC m=+660.465808968"
Oct 11 05:02:19 crc kubenswrapper[4651]: I1011 05:02:19.574587 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-zwhxz" podStartSLOduration=2.599936994 podStartE2EDuration="5.574569799s" podCreationTimestamp="2025-10-11 05:02:14 +0000 UTC" firstStartedPulling="2025-10-11 05:02:15.174909039 +0000 UTC m=+656.071141835" lastFinishedPulling="2025-10-11 05:02:18.149541844 +0000 UTC m=+659.045774640" observedRunningTime="2025-10-11 05:02:18.562791396 +0000 UTC m=+659.459024202" watchObservedRunningTime="2025-10-11 05:02:19.574569799 +0000 UTC m=+660.470802605"
Oct 11 05:02:24 crc kubenswrapper[4651]: I1011 05:02:24.669470 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-qpgc7"
Oct 11 05:02:24 crc kubenswrapper[4651]: I1011 05:02:24.987651 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-68bf86dbc-bk66w"
Oct 11 05:02:24 crc kubenswrapper[4651]: I1011 05:02:24.988044 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-68bf86dbc-bk66w"
Oct 11 05:02:24 crc kubenswrapper[4651]: I1011 05:02:24.995305 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-68bf86dbc-bk66w"
Oct 11 05:02:25 crc kubenswrapper[4651]: I1011 05:02:25.594312 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-68bf86dbc-bk66w"
Oct 11 05:02:25 crc kubenswrapper[4651]: I1011 05:02:25.666556 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-pc67v"]
Oct 11 05:02:35 crc kubenswrapper[4651]: I1011 05:02:35.233096 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-dlvdg"
Oct 11 05:02:48 crc kubenswrapper[4651]: I1011 05:02:48.502915 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d"]
Oct 11 05:02:48 crc kubenswrapper[4651]: I1011 05:02:48.504690 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d"
Oct 11 05:02:48 crc kubenswrapper[4651]: I1011 05:02:48.506809 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Oct 11 05:02:48 crc kubenswrapper[4651]: I1011 05:02:48.518236 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d"]
Oct 11 05:02:48 crc kubenswrapper[4651]: I1011 05:02:48.638574 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/080b871c-11bf-4ef1-b785-9058524b6c82-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d\" (UID: \"080b871c-11bf-4ef1-b785-9058524b6c82\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d"
Oct 11 05:02:48 crc kubenswrapper[4651]: I1011 05:02:48.638671 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/080b871c-11bf-4ef1-b785-9058524b6c82-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d\" (UID: \"080b871c-11bf-4ef1-b785-9058524b6c82\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d"
Oct 11 05:02:48 crc kubenswrapper[4651]: I1011 05:02:48.638899 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zcfcw\" (UniqueName: \"kubernetes.io/projected/080b871c-11bf-4ef1-b785-9058524b6c82-kube-api-access-zcfcw\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d\" (UID: \"080b871c-11bf-4ef1-b785-9058524b6c82\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d"
Oct 11 05:02:48 crc kubenswrapper[4651]: I1011 05:02:48.739917 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/080b871c-11bf-4ef1-b785-9058524b6c82-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d\" (UID: \"080b871c-11bf-4ef1-b785-9058524b6c82\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d"
Oct 11 05:02:48 crc kubenswrapper[4651]: I1011 05:02:48.740030 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/080b871c-11bf-4ef1-b785-9058524b6c82-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d\" (UID: \"080b871c-11bf-4ef1-b785-9058524b6c82\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d"
Oct 11 05:02:48 crc kubenswrapper[4651]: I1011 05:02:48.740118 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zcfcw\" (UniqueName: \"kubernetes.io/projected/080b871c-11bf-4ef1-b785-9058524b6c82-kube-api-access-zcfcw\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d\" (UID: \"080b871c-11bf-4ef1-b785-9058524b6c82\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d"
Oct 11 05:02:48 crc kubenswrapper[4651]: I1011 05:02:48.740472 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/080b871c-11bf-4ef1-b785-9058524b6c82-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d\" (UID: \"080b871c-11bf-4ef1-b785-9058524b6c82\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d"
Oct 11 05:02:48 crc kubenswrapper[4651]: I1011 05:02:48.740499 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/080b871c-11bf-4ef1-b785-9058524b6c82-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d\" (UID: \"080b871c-11bf-4ef1-b785-9058524b6c82\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d"
Oct 11 05:02:48 crc kubenswrapper[4651]: I1011 05:02:48.760922 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zcfcw\" (UniqueName: \"kubernetes.io/projected/080b871c-11bf-4ef1-b785-9058524b6c82-kube-api-access-zcfcw\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d\" (UID: \"080b871c-11bf-4ef1-b785-9058524b6c82\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d"
Oct 11 05:02:48 crc kubenswrapper[4651]: I1011 05:02:48.818689 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d"
Oct 11 05:02:49 crc kubenswrapper[4651]: I1011 05:02:49.066127 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d"]
Oct 11 05:02:49 crc kubenswrapper[4651]: I1011 05:02:49.767413 4651 generic.go:334] "Generic (PLEG): container finished" podID="080b871c-11bf-4ef1-b785-9058524b6c82" containerID="2fc7da570d4fbbdf04e03acbf89a8477a74259fd67d5a2430a6832967ac53574" exitCode=0
Oct 11 05:02:49 crc kubenswrapper[4651]: I1011 05:02:49.767555 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d" event={"ID":"080b871c-11bf-4ef1-b785-9058524b6c82","Type":"ContainerDied","Data":"2fc7da570d4fbbdf04e03acbf89a8477a74259fd67d5a2430a6832967ac53574"}
Oct 11 05:02:49 crc kubenswrapper[4651]: I1011 05:02:49.767854 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d" event={"ID":"080b871c-11bf-4ef1-b785-9058524b6c82","Type":"ContainerStarted","Data":"37c4fc2a668253fad6207b12bc9ca26268e669c7600cf3727d0ffef98d7af9ed"}
Oct 11 05:02:50 crc kubenswrapper[4651]: I1011 05:02:50.733926 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-pc67v" podUID="ccf042b9-768a-413d-bc29-58ab74c06fc9" containerName="console" containerID="cri-o://6b64d5a6afe3aee87f84ffda80f57bb957ad4c68893ce240e5b13fd953481335" gracePeriod=15
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.097802 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-pc67v_ccf042b9-768a-413d-bc29-58ab74c06fc9/console/0.log"
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.097909 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-pc67v"
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.274235 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ccf042b9-768a-413d-bc29-58ab74c06fc9-console-serving-cert\") pod \"ccf042b9-768a-413d-bc29-58ab74c06fc9\" (UID: \"ccf042b9-768a-413d-bc29-58ab74c06fc9\") "
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.274451 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ccf042b9-768a-413d-bc29-58ab74c06fc9-console-config\") pod \"ccf042b9-768a-413d-bc29-58ab74c06fc9\" (UID: \"ccf042b9-768a-413d-bc29-58ab74c06fc9\") "
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.274489 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ccf042b9-768a-413d-bc29-58ab74c06fc9-console-oauth-config\") pod \"ccf042b9-768a-413d-bc29-58ab74c06fc9\" (UID: \"ccf042b9-768a-413d-bc29-58ab74c06fc9\") "
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.274509 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-psnct\" (UniqueName: \"kubernetes.io/projected/ccf042b9-768a-413d-bc29-58ab74c06fc9-kube-api-access-psnct\") pod \"ccf042b9-768a-413d-bc29-58ab74c06fc9\" (UID: \"ccf042b9-768a-413d-bc29-58ab74c06fc9\") "
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.274535 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ccf042b9-768a-413d-bc29-58ab74c06fc9-service-ca\") pod \"ccf042b9-768a-413d-bc29-58ab74c06fc9\" (UID: \"ccf042b9-768a-413d-bc29-58ab74c06fc9\") "
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.274568 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ccf042b9-768a-413d-bc29-58ab74c06fc9-oauth-serving-cert\") pod \"ccf042b9-768a-413d-bc29-58ab74c06fc9\" (UID: \"ccf042b9-768a-413d-bc29-58ab74c06fc9\") "
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.274611 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ccf042b9-768a-413d-bc29-58ab74c06fc9-trusted-ca-bundle\") pod \"ccf042b9-768a-413d-bc29-58ab74c06fc9\" (UID: \"ccf042b9-768a-413d-bc29-58ab74c06fc9\") "
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.275527 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ccf042b9-768a-413d-bc29-58ab74c06fc9-console-config" (OuterVolumeSpecName: "console-config") pod "ccf042b9-768a-413d-bc29-58ab74c06fc9" (UID: "ccf042b9-768a-413d-bc29-58ab74c06fc9"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.275612 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ccf042b9-768a-413d-bc29-58ab74c06fc9-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "ccf042b9-768a-413d-bc29-58ab74c06fc9" (UID: "ccf042b9-768a-413d-bc29-58ab74c06fc9"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.275886 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ccf042b9-768a-413d-bc29-58ab74c06fc9-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "ccf042b9-768a-413d-bc29-58ab74c06fc9" (UID: "ccf042b9-768a-413d-bc29-58ab74c06fc9"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.277162 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ccf042b9-768a-413d-bc29-58ab74c06fc9-service-ca" (OuterVolumeSpecName: "service-ca") pod "ccf042b9-768a-413d-bc29-58ab74c06fc9" (UID: "ccf042b9-768a-413d-bc29-58ab74c06fc9"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.279990 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ccf042b9-768a-413d-bc29-58ab74c06fc9-kube-api-access-psnct" (OuterVolumeSpecName: "kube-api-access-psnct") pod "ccf042b9-768a-413d-bc29-58ab74c06fc9" (UID: "ccf042b9-768a-413d-bc29-58ab74c06fc9"). InnerVolumeSpecName "kube-api-access-psnct". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.280028 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccf042b9-768a-413d-bc29-58ab74c06fc9-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "ccf042b9-768a-413d-bc29-58ab74c06fc9" (UID: "ccf042b9-768a-413d-bc29-58ab74c06fc9"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.280551 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccf042b9-768a-413d-bc29-58ab74c06fc9-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "ccf042b9-768a-413d-bc29-58ab74c06fc9" (UID: "ccf042b9-768a-413d-bc29-58ab74c06fc9"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.376903 4651 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ccf042b9-768a-413d-bc29-58ab74c06fc9-console-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.376946 4651 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ccf042b9-768a-413d-bc29-58ab74c06fc9-console-config\") on node \"crc\" DevicePath \"\""
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.376971 4651 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ccf042b9-768a-413d-bc29-58ab74c06fc9-console-oauth-config\") on node \"crc\" DevicePath \"\""
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.376983 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-psnct\" (UniqueName: \"kubernetes.io/projected/ccf042b9-768a-413d-bc29-58ab74c06fc9-kube-api-access-psnct\") on node \"crc\" DevicePath \"\""
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.376995 4651 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ccf042b9-768a-413d-bc29-58ab74c06fc9-service-ca\") on node \"crc\" DevicePath \"\""
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.377005 4651 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ccf042b9-768a-413d-bc29-58ab74c06fc9-oauth-serving-cert\") on node \"crc\" DevicePath \"\""
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.377015 4651 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ccf042b9-768a-413d-bc29-58ab74c06fc9-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.783751 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-pc67v_ccf042b9-768a-413d-bc29-58ab74c06fc9/console/0.log"
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.783809 4651 generic.go:334] "Generic (PLEG): container finished" podID="ccf042b9-768a-413d-bc29-58ab74c06fc9" containerID="6b64d5a6afe3aee87f84ffda80f57bb957ad4c68893ce240e5b13fd953481335" exitCode=2
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.783916 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-pc67v" event={"ID":"ccf042b9-768a-413d-bc29-58ab74c06fc9","Type":"ContainerDied","Data":"6b64d5a6afe3aee87f84ffda80f57bb957ad4c68893ce240e5b13fd953481335"}
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.783982 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-pc67v" event={"ID":"ccf042b9-768a-413d-bc29-58ab74c06fc9","Type":"ContainerDied","Data":"7f446fc7e5c783c26633082ac1f62a5d8e42981388f8754731888287b4d0b2a7"}
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.784011 4651 scope.go:117] "RemoveContainer" containerID="6b64d5a6afe3aee87f84ffda80f57bb957ad4c68893ce240e5b13fd953481335"
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.784037 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-pc67v"
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.787684 4651 generic.go:334] "Generic (PLEG): container finished" podID="080b871c-11bf-4ef1-b785-9058524b6c82" containerID="676b26dd7c97260b66cc52af4f91ff0a6b92fb0fb92a40d4849d4853f57de329" exitCode=0
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.787743 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d" event={"ID":"080b871c-11bf-4ef1-b785-9058524b6c82","Type":"ContainerDied","Data":"676b26dd7c97260b66cc52af4f91ff0a6b92fb0fb92a40d4849d4853f57de329"}
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.814316 4651 scope.go:117] "RemoveContainer" containerID="6b64d5a6afe3aee87f84ffda80f57bb957ad4c68893ce240e5b13fd953481335"
Oct 11 05:02:51 crc kubenswrapper[4651]: E1011 05:02:51.814792 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b64d5a6afe3aee87f84ffda80f57bb957ad4c68893ce240e5b13fd953481335\": container with ID starting with 6b64d5a6afe3aee87f84ffda80f57bb957ad4c68893ce240e5b13fd953481335 not found: ID does not exist" containerID="6b64d5a6afe3aee87f84ffda80f57bb957ad4c68893ce240e5b13fd953481335"
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.814834 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b64d5a6afe3aee87f84ffda80f57bb957ad4c68893ce240e5b13fd953481335"} err="failed to get container status \"6b64d5a6afe3aee87f84ffda80f57bb957ad4c68893ce240e5b13fd953481335\": rpc error: code = NotFound desc = could not find container \"6b64d5a6afe3aee87f84ffda80f57bb957ad4c68893ce240e5b13fd953481335\": container with ID starting with 6b64d5a6afe3aee87f84ffda80f57bb957ad4c68893ce240e5b13fd953481335 not found: ID does not exist"
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.827325 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-pc67v"]
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.830503 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-pc67v"]
Oct 11 05:02:51 crc kubenswrapper[4651]: I1011 05:02:51.877389 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ccf042b9-768a-413d-bc29-58ab74c06fc9" path="/var/lib/kubelet/pods/ccf042b9-768a-413d-bc29-58ab74c06fc9/volumes"
Oct 11 05:02:52 crc kubenswrapper[4651]: I1011 05:02:52.084629 4651 patch_prober.go:28] interesting pod/console-f9d7485db-pc67v container/console namespace/openshift-console: Readiness probe status=failure output="Get \"https://10.217.0.13:8443/health\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Oct 11 05:02:52 crc kubenswrapper[4651]: I1011 05:02:52.085027 4651 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/console-f9d7485db-pc67v" podUID="ccf042b9-768a-413d-bc29-58ab74c06fc9" containerName="console" probeResult="failure" output="Get \"https://10.217.0.13:8443/health\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Oct 11 05:02:52 crc kubenswrapper[4651]: I1011 05:02:52.800979 4651 generic.go:334] "Generic (PLEG): container finished" podID="080b871c-11bf-4ef1-b785-9058524b6c82" containerID="9630663dbf90bf6cee312147d5398c44762f28deaf8094257c6e61366feb74e6" exitCode=0
Oct 11 05:02:52 crc kubenswrapper[4651]: I1011 05:02:52.801053 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d" event={"ID":"080b871c-11bf-4ef1-b785-9058524b6c82","Type":"ContainerDied","Data":"9630663dbf90bf6cee312147d5398c44762f28deaf8094257c6e61366feb74e6"}
Oct 11 05:02:54 crc kubenswrapper[4651]: I1011 05:02:54.085888 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d"
Oct 11 05:02:54 crc kubenswrapper[4651]: I1011 05:02:54.212040 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zcfcw\" (UniqueName: \"kubernetes.io/projected/080b871c-11bf-4ef1-b785-9058524b6c82-kube-api-access-zcfcw\") pod \"080b871c-11bf-4ef1-b785-9058524b6c82\" (UID: \"080b871c-11bf-4ef1-b785-9058524b6c82\") "
Oct 11 05:02:54 crc kubenswrapper[4651]: I1011 05:02:54.212158 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/080b871c-11bf-4ef1-b785-9058524b6c82-util\") pod \"080b871c-11bf-4ef1-b785-9058524b6c82\" (UID: \"080b871c-11bf-4ef1-b785-9058524b6c82\") "
Oct 11 05:02:54 crc kubenswrapper[4651]: I1011 05:02:54.212190 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/080b871c-11bf-4ef1-b785-9058524b6c82-bundle\") pod \"080b871c-11bf-4ef1-b785-9058524b6c82\" (UID: \"080b871c-11bf-4ef1-b785-9058524b6c82\") "
Oct 11 05:02:54 crc kubenswrapper[4651]: I1011 05:02:54.213376 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/080b871c-11bf-4ef1-b785-9058524b6c82-bundle" (OuterVolumeSpecName: "bundle") pod "080b871c-11bf-4ef1-b785-9058524b6c82" (UID: "080b871c-11bf-4ef1-b785-9058524b6c82"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 05:02:54 crc kubenswrapper[4651]: I1011 05:02:54.223072 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/080b871c-11bf-4ef1-b785-9058524b6c82-kube-api-access-zcfcw" (OuterVolumeSpecName: "kube-api-access-zcfcw") pod "080b871c-11bf-4ef1-b785-9058524b6c82" (UID: "080b871c-11bf-4ef1-b785-9058524b6c82"). InnerVolumeSpecName "kube-api-access-zcfcw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:02:54 crc kubenswrapper[4651]: I1011 05:02:54.226062 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/080b871c-11bf-4ef1-b785-9058524b6c82-util" (OuterVolumeSpecName: "util") pod "080b871c-11bf-4ef1-b785-9058524b6c82" (UID: "080b871c-11bf-4ef1-b785-9058524b6c82"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 05:02:54 crc kubenswrapper[4651]: I1011 05:02:54.313132 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zcfcw\" (UniqueName: \"kubernetes.io/projected/080b871c-11bf-4ef1-b785-9058524b6c82-kube-api-access-zcfcw\") on node \"crc\" DevicePath \"\""
Oct 11 05:02:54 crc kubenswrapper[4651]: I1011 05:02:54.313162 4651 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/080b871c-11bf-4ef1-b785-9058524b6c82-util\") on node \"crc\" DevicePath \"\""
Oct 11 05:02:54 crc kubenswrapper[4651]: I1011 05:02:54.313176 4651 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/080b871c-11bf-4ef1-b785-9058524b6c82-bundle\") on node \"crc\" DevicePath \"\""
Oct 11 05:02:54 crc kubenswrapper[4651]: I1011 05:02:54.818928 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d" event={"ID":"080b871c-11bf-4ef1-b785-9058524b6c82","Type":"ContainerDied","Data":"37c4fc2a668253fad6207b12bc9ca26268e669c7600cf3727d0ffef98d7af9ed"}
Oct 11 05:02:54 crc kubenswrapper[4651]: I1011 05:02:54.818972 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="37c4fc2a668253fad6207b12bc9ca26268e669c7600cf3727d0ffef98d7af9ed"
Oct 11 05:02:54 crc kubenswrapper[4651]: I1011 05:02:54.819049 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d"
Oct 11 05:03:03 crc kubenswrapper[4651]: I1011 05:03:03.864279 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-64d4c8dfd9-ssxfc"]
Oct 11 05:03:03 crc kubenswrapper[4651]: E1011 05:03:03.864745 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="080b871c-11bf-4ef1-b785-9058524b6c82" containerName="extract"
Oct 11 05:03:03 crc kubenswrapper[4651]: I1011 05:03:03.864756 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="080b871c-11bf-4ef1-b785-9058524b6c82" containerName="extract"
Oct 11 05:03:03 crc kubenswrapper[4651]: E1011 05:03:03.864768 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccf042b9-768a-413d-bc29-58ab74c06fc9" containerName="console"
Oct 11 05:03:03 crc kubenswrapper[4651]: I1011 05:03:03.864774 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccf042b9-768a-413d-bc29-58ab74c06fc9" containerName="console"
Oct 11 05:03:03 crc kubenswrapper[4651]: E1011 05:03:03.864783 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="080b871c-11bf-4ef1-b785-9058524b6c82" containerName="util"
Oct 11 05:03:03 crc kubenswrapper[4651]: I1011 05:03:03.864789 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="080b871c-11bf-4ef1-b785-9058524b6c82" containerName="util"
Oct 11 05:03:03 crc kubenswrapper[4651]: E1011 05:03:03.864801 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="080b871c-11bf-4ef1-b785-9058524b6c82" containerName="pull"
Oct 11 05:03:03 crc kubenswrapper[4651]: I1011 05:03:03.864807 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="080b871c-11bf-4ef1-b785-9058524b6c82" containerName="pull"
Oct 11 05:03:03 crc kubenswrapper[4651]: I1011 05:03:03.864935 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccf042b9-768a-413d-bc29-58ab74c06fc9" containerName="console"
Oct 11 05:03:03 crc kubenswrapper[4651]: I1011 05:03:03.864951 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="080b871c-11bf-4ef1-b785-9058524b6c82" containerName="extract"
Oct 11 05:03:03 crc kubenswrapper[4651]: I1011 05:03:03.865375 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-64d4c8dfd9-ssxfc"
Oct 11 05:03:03 crc kubenswrapper[4651]: I1011 05:03:03.870752 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt"
Oct 11 05:03:03 crc kubenswrapper[4651]: I1011 05:03:03.870914 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt"
Oct 11 05:03:03 crc kubenswrapper[4651]: I1011 05:03:03.870934 4651 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert"
Oct 11 05:03:03 crc kubenswrapper[4651]: I1011 05:03:03.870921 4651 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert"
Oct 11 05:03:03 crc kubenswrapper[4651]: I1011 05:03:03.871026 4651 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-6ccfk"
Oct 11 05:03:03 crc kubenswrapper[4651]: I1011 05:03:03.889952 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-64d4c8dfd9-ssxfc"]
Oct 11 05:03:04 crc kubenswrapper[4651]: I1011 05:03:04.037771 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4zv9\" (UniqueName: \"kubernetes.io/projected/176a1bb0-149f-47fd-b9ba-d3249b405fa1-kube-api-access-p4zv9\") pod \"metallb-operator-controller-manager-64d4c8dfd9-ssxfc\" (UID: \"176a1bb0-149f-47fd-b9ba-d3249b405fa1\") " pod="metallb-system/metallb-operator-controller-manager-64d4c8dfd9-ssxfc"
Oct 11 05:03:04 crc kubenswrapper[4651]: I1011 05:03:04.037811 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/176a1bb0-149f-47fd-b9ba-d3249b405fa1-apiservice-cert\") pod \"metallb-operator-controller-manager-64d4c8dfd9-ssxfc\" (UID: \"176a1bb0-149f-47fd-b9ba-d3249b405fa1\") " pod="metallb-system/metallb-operator-controller-manager-64d4c8dfd9-ssxfc"
Oct 11 05:03:04 crc kubenswrapper[4651]: I1011 05:03:04.037873 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/176a1bb0-149f-47fd-b9ba-d3249b405fa1-webhook-cert\") pod \"metallb-operator-controller-manager-64d4c8dfd9-ssxfc\" (UID: \"176a1bb0-149f-47fd-b9ba-d3249b405fa1\") " pod="metallb-system/metallb-operator-controller-manager-64d4c8dfd9-ssxfc"
Oct 11 05:03:04 crc kubenswrapper[4651]: I1011 05:03:04.138467 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/176a1bb0-149f-47fd-b9ba-d3249b405fa1-webhook-cert\") pod \"metallb-operator-controller-manager-64d4c8dfd9-ssxfc\" (UID: \"176a1bb0-149f-47fd-b9ba-d3249b405fa1\") " pod="metallb-system/metallb-operator-controller-manager-64d4c8dfd9-ssxfc"
Oct 11 05:03:04 crc kubenswrapper[4651]: I1011 05:03:04.138552 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4zv9\" (UniqueName: \"kubernetes.io/projected/176a1bb0-149f-47fd-b9ba-d3249b405fa1-kube-api-access-p4zv9\") pod \"metallb-operator-controller-manager-64d4c8dfd9-ssxfc\" (UID: \"176a1bb0-149f-47fd-b9ba-d3249b405fa1\") " pod="metallb-system/metallb-operator-controller-manager-64d4c8dfd9-ssxfc"
Oct 11 05:03:04 crc kubenswrapper[4651]: I1011 05:03:04.138574 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/176a1bb0-149f-47fd-b9ba-d3249b405fa1-apiservice-cert\") pod \"metallb-operator-controller-manager-64d4c8dfd9-ssxfc\" (UID: \"176a1bb0-149f-47fd-b9ba-d3249b405fa1\") " pod="metallb-system/metallb-operator-controller-manager-64d4c8dfd9-ssxfc"
Oct 11 05:03:04 crc kubenswrapper[4651]: I1011 05:03:04.144513 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/176a1bb0-149f-47fd-b9ba-d3249b405fa1-apiservice-cert\") pod \"metallb-operator-controller-manager-64d4c8dfd9-ssxfc\" (UID: \"176a1bb0-149f-47fd-b9ba-d3249b405fa1\") " pod="metallb-system/metallb-operator-controller-manager-64d4c8dfd9-ssxfc"
Oct 11 05:03:04 crc kubenswrapper[4651]: I1011 05:03:04.153378 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/176a1bb0-149f-47fd-b9ba-d3249b405fa1-webhook-cert\") pod \"metallb-operator-controller-manager-64d4c8dfd9-ssxfc\" (UID: \"176a1bb0-149f-47fd-b9ba-d3249b405fa1\") " pod="metallb-system/metallb-operator-controller-manager-64d4c8dfd9-ssxfc"
Oct 11 05:03:04 crc kubenswrapper[4651]: I1011 05:03:04.154354 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4zv9\" (UniqueName: \"kubernetes.io/projected/176a1bb0-149f-47fd-b9ba-d3249b405fa1-kube-api-access-p4zv9\") pod \"metallb-operator-controller-manager-64d4c8dfd9-ssxfc\" (UID: \"176a1bb0-149f-47fd-b9ba-d3249b405fa1\") " pod="metallb-system/metallb-operator-controller-manager-64d4c8dfd9-ssxfc"
Oct 11 05:03:04 crc kubenswrapper[4651]: I1011 05:03:04.179926 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-64d4c8dfd9-ssxfc"
Oct 11 05:03:04 crc kubenswrapper[4651]: I1011 05:03:04.239006 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-6ddcb68dc7-mhfj6"]
Oct 11 05:03:04 crc kubenswrapper[4651]: I1011 05:03:04.239686 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6ddcb68dc7-mhfj6"
Oct 11 05:03:04 crc kubenswrapper[4651]: I1011 05:03:04.241118 4651 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-2m7rt"
Oct 11 05:03:04 crc kubenswrapper[4651]: I1011 05:03:04.242712 4651 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert"
Oct 11 05:03:04 crc kubenswrapper[4651]: I1011 05:03:04.242773 4651 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert"
Oct 11 05:03:04 crc kubenswrapper[4651]: I1011 05:03:04.262663 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6ddcb68dc7-mhfj6"]
Oct 11 05:03:04 crc kubenswrapper[4651]: I1011 05:03:04.341025 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxdxg\" (UniqueName: \"kubernetes.io/projected/3da924ac-510e-4c75-8e5d-2571c454a7a5-kube-api-access-pxdxg\") pod \"metallb-operator-webhook-server-6ddcb68dc7-mhfj6\" (UID: \"3da924ac-510e-4c75-8e5d-2571c454a7a5\") " pod="metallb-system/metallb-operator-webhook-server-6ddcb68dc7-mhfj6"
Oct 11 05:03:04 crc kubenswrapper[4651]: I1011 05:03:04.341342 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3da924ac-510e-4c75-8e5d-2571c454a7a5-webhook-cert\") pod \"metallb-operator-webhook-server-6ddcb68dc7-mhfj6\" (UID: \"3da924ac-510e-4c75-8e5d-2571c454a7a5\") " pod="metallb-system/metallb-operator-webhook-server-6ddcb68dc7-mhfj6"
Oct 11 05:03:04 crc kubenswrapper[4651]: I1011 05:03:04.341417 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3da924ac-510e-4c75-8e5d-2571c454a7a5-apiservice-cert\") pod \"metallb-operator-webhook-server-6ddcb68dc7-mhfj6\" (UID: \"3da924ac-510e-4c75-8e5d-2571c454a7a5\") " pod="metallb-system/metallb-operator-webhook-server-6ddcb68dc7-mhfj6"
Oct 11 05:03:04 crc kubenswrapper[4651]: I1011 05:03:04.423065 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-64d4c8dfd9-ssxfc"]
Oct 11 05:03:04 crc kubenswrapper[4651]: I1011 05:03:04.442545 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3da924ac-510e-4c75-8e5d-2571c454a7a5-webhook-cert\") pod \"metallb-operator-webhook-server-6ddcb68dc7-mhfj6\" (UID: \"3da924ac-510e-4c75-8e5d-2571c454a7a5\") " pod="metallb-system/metallb-operator-webhook-server-6ddcb68dc7-mhfj6"
Oct 11 05:03:04 crc kubenswrapper[4651]: I1011 05:03:04.442589 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxdxg\" (UniqueName: \"kubernetes.io/projected/3da924ac-510e-4c75-8e5d-2571c454a7a5-kube-api-access-pxdxg\") pod \"metallb-operator-webhook-server-6ddcb68dc7-mhfj6\" (UID: \"3da924ac-510e-4c75-8e5d-2571c454a7a5\") " pod="metallb-system/metallb-operator-webhook-server-6ddcb68dc7-mhfj6"
Oct 11 05:03:04 crc kubenswrapper[4651]: I1011 05:03:04.442647 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3da924ac-510e-4c75-8e5d-2571c454a7a5-apiservice-cert\") pod \"metallb-operator-webhook-server-6ddcb68dc7-mhfj6\" (UID: \"3da924ac-510e-4c75-8e5d-2571c454a7a5\") " pod="metallb-system/metallb-operator-webhook-server-6ddcb68dc7-mhfj6"
Oct 11 05:03:04 crc kubenswrapper[4651]: I1011 05:03:04.450296 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3da924ac-510e-4c75-8e5d-2571c454a7a5-webhook-cert\") pod \"metallb-operator-webhook-server-6ddcb68dc7-mhfj6\" (UID: \"3da924ac-510e-4c75-8e5d-2571c454a7a5\") " pod="metallb-system/metallb-operator-webhook-server-6ddcb68dc7-mhfj6"
Oct 11 05:03:04 crc kubenswrapper[4651]: I1011 05:03:04.450347 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3da924ac-510e-4c75-8e5d-2571c454a7a5-apiservice-cert\") pod \"metallb-operator-webhook-server-6ddcb68dc7-mhfj6\" (UID: \"3da924ac-510e-4c75-8e5d-2571c454a7a5\") " pod="metallb-system/metallb-operator-webhook-server-6ddcb68dc7-mhfj6"
Oct 11 05:03:04 crc kubenswrapper[4651]: I1011 05:03:04.464192 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxdxg\" (UniqueName: \"kubernetes.io/projected/3da924ac-510e-4c75-8e5d-2571c454a7a5-kube-api-access-pxdxg\") pod \"metallb-operator-webhook-server-6ddcb68dc7-mhfj6\" (UID: \"3da924ac-510e-4c75-8e5d-2571c454a7a5\") " pod="metallb-system/metallb-operator-webhook-server-6ddcb68dc7-mhfj6"
Oct 11 05:03:04 crc kubenswrapper[4651]: I1011 05:03:04.553484 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6ddcb68dc7-mhfj6"
Oct 11 05:03:04 crc kubenswrapper[4651]: I1011 05:03:04.815933 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6ddcb68dc7-mhfj6"]
Oct 11 05:03:04 crc kubenswrapper[4651]: W1011 05:03:04.821785 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3da924ac_510e_4c75_8e5d_2571c454a7a5.slice/crio-acd75498833213d45897762894299d7785c8428b5f89d2e37bb5cdea1837bc59 WatchSource:0}: Error finding container acd75498833213d45897762894299d7785c8428b5f89d2e37bb5cdea1837bc59: Status 404 returned error can't find the container with id acd75498833213d45897762894299d7785c8428b5f89d2e37bb5cdea1837bc59
Oct 11 05:03:04 crc kubenswrapper[4651]: I1011 05:03:04.907881 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-64d4c8dfd9-ssxfc" event={"ID":"176a1bb0-149f-47fd-b9ba-d3249b405fa1","Type":"ContainerStarted","Data":"8bc0c2abecfd80d3b62d932aa42b04f99493c483cdc78a08df02dd63f1d6cec7"}
Oct 11 05:03:04 crc kubenswrapper[4651]: I1011 05:03:04.908800 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6ddcb68dc7-mhfj6" event={"ID":"3da924ac-510e-4c75-8e5d-2571c454a7a5","Type":"ContainerStarted","Data":"acd75498833213d45897762894299d7785c8428b5f89d2e37bb5cdea1837bc59"}
Oct 11 05:03:07 crc kubenswrapper[4651]: I1011 05:03:07.929344 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-64d4c8dfd9-ssxfc" event={"ID":"176a1bb0-149f-47fd-b9ba-d3249b405fa1","Type":"ContainerStarted","Data":"e8cfc70ba801b803b27a94e885af08724fe4268ee1b0835bb9ca95603ca2962a"}
Oct 11 05:03:07 crc kubenswrapper[4651]: I1011 05:03:07.930573 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-64d4c8dfd9-ssxfc"
Oct 11 05:03:09 crc kubenswrapper[4651]: I1011 05:03:09.900567 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-64d4c8dfd9-ssxfc" podStartSLOduration=4.058429622 podStartE2EDuration="6.90054014s" podCreationTimestamp="2025-10-11 05:03:03 +0000 UTC" firstStartedPulling="2025-10-11 05:03:04.428531377 +0000 UTC m=+705.324764173" lastFinishedPulling="2025-10-11 05:03:07.270641895 +0000 UTC m=+708.166874691" observedRunningTime="2025-10-11 05:03:07.950125593 +0000 UTC m=+708.846358389" watchObservedRunningTime="2025-10-11 05:03:09.90054014 +0000 UTC m=+710.796772936"
Oct 11 05:03:09 crc kubenswrapper[4651]: I1011 05:03:09.943190 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6ddcb68dc7-mhfj6" event={"ID":"3da924ac-510e-4c75-8e5d-2571c454a7a5","Type":"ContainerStarted","Data":"d40be26a9801132ff97570f12400fc8dcb30a077bd51ac454a89202adfc4a84b"}
Oct 11 05:03:09 crc kubenswrapper[4651]: I1011 05:03:09.967176 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-6ddcb68dc7-mhfj6" podStartSLOduration=1.762744174 podStartE2EDuration="5.967160593s" podCreationTimestamp="2025-10-11 05:03:04 +0000 UTC" firstStartedPulling="2025-10-11 05:03:04.824428198 +0000 UTC m=+705.720660994" lastFinishedPulling="2025-10-11 05:03:09.028844617 +0000 UTC m=+709.925077413" observedRunningTime="2025-10-11 05:03:09.963602162 +0000 UTC m=+710.859834988" watchObservedRunningTime="2025-10-11 05:03:09.967160593 +0000 UTC m=+710.863393389"
Oct 11 05:03:10 crc kubenswrapper[4651]: I1011 05:03:10.948013 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-6ddcb68dc7-mhfj6"
Oct 11 05:03:16 crc kubenswrapper[4651]: I1011 05:03:16.310528 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 11 05:03:16 crc kubenswrapper[4651]: I1011 05:03:16.310867 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 11 05:03:24 crc kubenswrapper[4651]: I1011 05:03:24.557864 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-6ddcb68dc7-mhfj6"
Oct 11 05:03:44 crc kubenswrapper[4651]: I1011 05:03:44.183119 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-64d4c8dfd9-ssxfc"
Oct 11 05:03:44 crc kubenswrapper[4651]: I1011 05:03:44.935777 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-sxrwh"]
Oct 11 05:03:44 crc kubenswrapper[4651]: I1011 05:03:44.937353 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-sxrwh" Oct 11 05:03:44 crc kubenswrapper[4651]: I1011 05:03:44.939702 4651 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Oct 11 05:03:44 crc kubenswrapper[4651]: I1011 05:03:44.940525 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-x9n5l"] Oct 11 05:03:44 crc kubenswrapper[4651]: I1011 05:03:44.941791 4651 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-pt5hj" Oct 11 05:03:44 crc kubenswrapper[4651]: I1011 05:03:44.943343 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-x9n5l" Oct 11 05:03:44 crc kubenswrapper[4651]: I1011 05:03:44.945230 4651 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Oct 11 05:03:44 crc kubenswrapper[4651]: I1011 05:03:44.945988 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Oct 11 05:03:44 crc kubenswrapper[4651]: I1011 05:03:44.957565 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-sxrwh"] Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.019592 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-j7mlx"] Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.020431 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-j7mlx" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.022500 4651 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.022793 4651 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.022944 4651 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-75q55" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.024396 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.034156 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-68d546b9d8-gbzvh"] Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.035010 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-68d546b9d8-gbzvh" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.036511 4651 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.050423 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-gbzvh"] Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.067775 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/870fb108-5a7a-4ade-82fd-8cf3c09950b8-frr-sockets\") pod \"frr-k8s-x9n5l\" (UID: \"870fb108-5a7a-4ade-82fd-8cf3c09950b8\") " pod="metallb-system/frr-k8s-x9n5l" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.067980 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/870fb108-5a7a-4ade-82fd-8cf3c09950b8-frr-startup\") pod \"frr-k8s-x9n5l\" (UID: \"870fb108-5a7a-4ade-82fd-8cf3c09950b8\") " pod="metallb-system/frr-k8s-x9n5l" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.068116 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/870fb108-5a7a-4ade-82fd-8cf3c09950b8-metrics\") pod \"frr-k8s-x9n5l\" (UID: \"870fb108-5a7a-4ade-82fd-8cf3c09950b8\") " pod="metallb-system/frr-k8s-x9n5l" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.068312 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4a2881f0-834f-4e9d-8be5-5adb1f5feefd-cert\") pod \"frr-k8s-webhook-server-64bf5d555-sxrwh\" (UID: \"4a2881f0-834f-4e9d-8be5-5adb1f5feefd\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-sxrwh" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.068374 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjcjg\" (UniqueName: \"kubernetes.io/projected/a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6-kube-api-access-rjcjg\") pod \"speaker-j7mlx\" (UID: \"a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6\") " pod="metallb-system/speaker-j7mlx" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.068441 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/870fb108-5a7a-4ade-82fd-8cf3c09950b8-frr-conf\") pod \"frr-k8s-x9n5l\" (UID: \"870fb108-5a7a-4ade-82fd-8cf3c09950b8\") " pod="metallb-system/frr-k8s-x9n5l" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.068489 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wpf79\" (UniqueName: \"kubernetes.io/projected/870fb108-5a7a-4ade-82fd-8cf3c09950b8-kube-api-access-wpf79\") pod \"frr-k8s-x9n5l\" (UID: \"870fb108-5a7a-4ade-82fd-8cf3c09950b8\") " pod="metallb-system/frr-k8s-x9n5l" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.068516 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6-memberlist\") pod \"speaker-j7mlx\" (UID: \"a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6\") " pod="metallb-system/speaker-j7mlx" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.068574 4651 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/870fb108-5a7a-4ade-82fd-8cf3c09950b8-reloader\") pod \"frr-k8s-x9n5l\" (UID: \"870fb108-5a7a-4ade-82fd-8cf3c09950b8\") " pod="metallb-system/frr-k8s-x9n5l" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.068696 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6-metrics-certs\") pod \"speaker-j7mlx\" (UID: \"a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6\") " pod="metallb-system/speaker-j7mlx" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.068780 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6-metallb-excludel2\") pod \"speaker-j7mlx\" (UID: \"a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6\") " pod="metallb-system/speaker-j7mlx" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.068875 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/870fb108-5a7a-4ade-82fd-8cf3c09950b8-metrics-certs\") pod \"frr-k8s-x9n5l\" (UID: \"870fb108-5a7a-4ade-82fd-8cf3c09950b8\") " pod="metallb-system/frr-k8s-x9n5l" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.068966 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4q6fn\" (UniqueName: \"kubernetes.io/projected/4a2881f0-834f-4e9d-8be5-5adb1f5feefd-kube-api-access-4q6fn\") pod \"frr-k8s-webhook-server-64bf5d555-sxrwh\" (UID: \"4a2881f0-834f-4e9d-8be5-5adb1f5feefd\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-sxrwh" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.170716 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/870fb108-5a7a-4ade-82fd-8cf3c09950b8-reloader\") pod \"frr-k8s-x9n5l\" (UID: \"870fb108-5a7a-4ade-82fd-8cf3c09950b8\") " pod="metallb-system/frr-k8s-x9n5l" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.171057 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6-metallb-excludel2\") pod \"speaker-j7mlx\" (UID: \"a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6\") " pod="metallb-system/speaker-j7mlx" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.172080 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6-metrics-certs\") pod \"speaker-j7mlx\" (UID: \"a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6\") " pod="metallb-system/speaker-j7mlx" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.172333 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/870fb108-5a7a-4ade-82fd-8cf3c09950b8-metrics-certs\") pod \"frr-k8s-x9n5l\" (UID: \"870fb108-5a7a-4ade-82fd-8cf3c09950b8\") " pod="metallb-system/frr-k8s-x9n5l" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.173441 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4q6fn\" (UniqueName: 
\"kubernetes.io/projected/4a2881f0-834f-4e9d-8be5-5adb1f5feefd-kube-api-access-4q6fn\") pod \"frr-k8s-webhook-server-64bf5d555-sxrwh\" (UID: \"4a2881f0-834f-4e9d-8be5-5adb1f5feefd\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-sxrwh" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.173585 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gkxqh\" (UniqueName: \"kubernetes.io/projected/b23b74fb-01aa-4027-978b-ef5fccb6a023-kube-api-access-gkxqh\") pod \"controller-68d546b9d8-gbzvh\" (UID: \"b23b74fb-01aa-4027-978b-ef5fccb6a023\") " pod="metallb-system/controller-68d546b9d8-gbzvh" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.173711 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/870fb108-5a7a-4ade-82fd-8cf3c09950b8-frr-sockets\") pod \"frr-k8s-x9n5l\" (UID: \"870fb108-5a7a-4ade-82fd-8cf3c09950b8\") " pod="metallb-system/frr-k8s-x9n5l" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.173816 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/870fb108-5a7a-4ade-82fd-8cf3c09950b8-frr-startup\") pod \"frr-k8s-x9n5l\" (UID: \"870fb108-5a7a-4ade-82fd-8cf3c09950b8\") " pod="metallb-system/frr-k8s-x9n5l" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.173983 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/870fb108-5a7a-4ade-82fd-8cf3c09950b8-metrics\") pod \"frr-k8s-x9n5l\" (UID: \"870fb108-5a7a-4ade-82fd-8cf3c09950b8\") " pod="metallb-system/frr-k8s-x9n5l" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.174098 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b23b74fb-01aa-4027-978b-ef5fccb6a023-cert\") pod \"controller-68d546b9d8-gbzvh\" (UID: \"b23b74fb-01aa-4027-978b-ef5fccb6a023\") " pod="metallb-system/controller-68d546b9d8-gbzvh" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.174266 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b23b74fb-01aa-4027-978b-ef5fccb6a023-metrics-certs\") pod \"controller-68d546b9d8-gbzvh\" (UID: \"b23b74fb-01aa-4027-978b-ef5fccb6a023\") " pod="metallb-system/controller-68d546b9d8-gbzvh" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.174377 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4a2881f0-834f-4e9d-8be5-5adb1f5feefd-cert\") pod \"frr-k8s-webhook-server-64bf5d555-sxrwh\" (UID: \"4a2881f0-834f-4e9d-8be5-5adb1f5feefd\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-sxrwh" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.174545 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjcjg\" (UniqueName: \"kubernetes.io/projected/a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6-kube-api-access-rjcjg\") pod \"speaker-j7mlx\" (UID: \"a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6\") " pod="metallb-system/speaker-j7mlx" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.174649 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/870fb108-5a7a-4ade-82fd-8cf3c09950b8-frr-conf\") pod 
\"frr-k8s-x9n5l\" (UID: \"870fb108-5a7a-4ade-82fd-8cf3c09950b8\") " pod="metallb-system/frr-k8s-x9n5l" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.174756 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wpf79\" (UniqueName: \"kubernetes.io/projected/870fb108-5a7a-4ade-82fd-8cf3c09950b8-kube-api-access-wpf79\") pod \"frr-k8s-x9n5l\" (UID: \"870fb108-5a7a-4ade-82fd-8cf3c09950b8\") " pod="metallb-system/frr-k8s-x9n5l" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.174881 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6-memberlist\") pod \"speaker-j7mlx\" (UID: \"a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6\") " pod="metallb-system/speaker-j7mlx" Oct 11 05:03:45 crc kubenswrapper[4651]: E1011 05:03:45.172287 4651 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Oct 11 05:03:45 crc kubenswrapper[4651]: E1011 05:03:45.175131 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6-metrics-certs podName:a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6 nodeName:}" failed. No retries permitted until 2025-10-11 05:03:45.675112458 +0000 UTC m=+746.571345274 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6-metrics-certs") pod "speaker-j7mlx" (UID: "a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6") : secret "speaker-certs-secret" not found Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.174017 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/870fb108-5a7a-4ade-82fd-8cf3c09950b8-frr-sockets\") pod \"frr-k8s-x9n5l\" (UID: \"870fb108-5a7a-4ade-82fd-8cf3c09950b8\") " pod="metallb-system/frr-k8s-x9n5l" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.174714 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/870fb108-5a7a-4ade-82fd-8cf3c09950b8-frr-startup\") pod \"frr-k8s-x9n5l\" (UID: \"870fb108-5a7a-4ade-82fd-8cf3c09950b8\") " pod="metallb-system/frr-k8s-x9n5l" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.172019 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6-metallb-excludel2\") pod \"speaker-j7mlx\" (UID: \"a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6\") " pod="metallb-system/speaker-j7mlx" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.175138 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/870fb108-5a7a-4ade-82fd-8cf3c09950b8-frr-conf\") pod \"frr-k8s-x9n5l\" (UID: \"870fb108-5a7a-4ade-82fd-8cf3c09950b8\") " pod="metallb-system/frr-k8s-x9n5l" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.171257 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/870fb108-5a7a-4ade-82fd-8cf3c09950b8-reloader\") pod \"frr-k8s-x9n5l\" (UID: \"870fb108-5a7a-4ade-82fd-8cf3c09950b8\") " pod="metallb-system/frr-k8s-x9n5l" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.174454 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" 
(UniqueName: \"kubernetes.io/empty-dir/870fb108-5a7a-4ade-82fd-8cf3c09950b8-metrics\") pod \"frr-k8s-x9n5l\" (UID: \"870fb108-5a7a-4ade-82fd-8cf3c09950b8\") " pod="metallb-system/frr-k8s-x9n5l" Oct 11 05:03:45 crc kubenswrapper[4651]: E1011 05:03:45.175468 4651 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 11 05:03:45 crc kubenswrapper[4651]: E1011 05:03:45.175514 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6-memberlist podName:a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6 nodeName:}" failed. No retries permitted until 2025-10-11 05:03:45.675498887 +0000 UTC m=+746.571731683 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6-memberlist") pod "speaker-j7mlx" (UID: "a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6") : secret "metallb-memberlist" not found Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.180450 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/870fb108-5a7a-4ade-82fd-8cf3c09950b8-metrics-certs\") pod \"frr-k8s-x9n5l\" (UID: \"870fb108-5a7a-4ade-82fd-8cf3c09950b8\") " pod="metallb-system/frr-k8s-x9n5l" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.182771 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4a2881f0-834f-4e9d-8be5-5adb1f5feefd-cert\") pod \"frr-k8s-webhook-server-64bf5d555-sxrwh\" (UID: \"4a2881f0-834f-4e9d-8be5-5adb1f5feefd\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-sxrwh" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.193412 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjcjg\" (UniqueName: \"kubernetes.io/projected/a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6-kube-api-access-rjcjg\") pod \"speaker-j7mlx\" (UID: \"a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6\") " pod="metallb-system/speaker-j7mlx" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.197439 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wpf79\" (UniqueName: \"kubernetes.io/projected/870fb108-5a7a-4ade-82fd-8cf3c09950b8-kube-api-access-wpf79\") pod \"frr-k8s-x9n5l\" (UID: \"870fb108-5a7a-4ade-82fd-8cf3c09950b8\") " pod="metallb-system/frr-k8s-x9n5l" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.198459 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4q6fn\" (UniqueName: \"kubernetes.io/projected/4a2881f0-834f-4e9d-8be5-5adb1f5feefd-kube-api-access-4q6fn\") pod \"frr-k8s-webhook-server-64bf5d555-sxrwh\" (UID: \"4a2881f0-834f-4e9d-8be5-5adb1f5feefd\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-sxrwh" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.257067 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-sxrwh" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.267358 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-x9n5l" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.276812 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gkxqh\" (UniqueName: \"kubernetes.io/projected/b23b74fb-01aa-4027-978b-ef5fccb6a023-kube-api-access-gkxqh\") pod \"controller-68d546b9d8-gbzvh\" (UID: \"b23b74fb-01aa-4027-978b-ef5fccb6a023\") " pod="metallb-system/controller-68d546b9d8-gbzvh" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.276895 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b23b74fb-01aa-4027-978b-ef5fccb6a023-cert\") pod \"controller-68d546b9d8-gbzvh\" (UID: \"b23b74fb-01aa-4027-978b-ef5fccb6a023\") " pod="metallb-system/controller-68d546b9d8-gbzvh" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.276919 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b23b74fb-01aa-4027-978b-ef5fccb6a023-metrics-certs\") pod \"controller-68d546b9d8-gbzvh\" (UID: \"b23b74fb-01aa-4027-978b-ef5fccb6a023\") " pod="metallb-system/controller-68d546b9d8-gbzvh" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.282737 4651 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.286379 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b23b74fb-01aa-4027-978b-ef5fccb6a023-metrics-certs\") pod \"controller-68d546b9d8-gbzvh\" (UID: \"b23b74fb-01aa-4027-978b-ef5fccb6a023\") " pod="metallb-system/controller-68d546b9d8-gbzvh" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.292498 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b23b74fb-01aa-4027-978b-ef5fccb6a023-cert\") pod \"controller-68d546b9d8-gbzvh\" (UID: \"b23b74fb-01aa-4027-978b-ef5fccb6a023\") " pod="metallb-system/controller-68d546b9d8-gbzvh" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.306450 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gkxqh\" (UniqueName: \"kubernetes.io/projected/b23b74fb-01aa-4027-978b-ef5fccb6a023-kube-api-access-gkxqh\") pod \"controller-68d546b9d8-gbzvh\" (UID: \"b23b74fb-01aa-4027-978b-ef5fccb6a023\") " pod="metallb-system/controller-68d546b9d8-gbzvh" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.351450 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-68d546b9d8-gbzvh" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.562502 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-gbzvh"] Oct 11 05:03:45 crc kubenswrapper[4651]: W1011 05:03:45.566709 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb23b74fb_01aa_4027_978b_ef5fccb6a023.slice/crio-b2f109c12662d6c5b15b038b51bd9f32541df6063f3ea7069e0c6e21f015ea27 WatchSource:0}: Error finding container b2f109c12662d6c5b15b038b51bd9f32541df6063f3ea7069e0c6e21f015ea27: Status 404 returned error can't find the container with id b2f109c12662d6c5b15b038b51bd9f32541df6063f3ea7069e0c6e21f015ea27 Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.687000 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6-memberlist\") pod \"speaker-j7mlx\" (UID: \"a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6\") " pod="metallb-system/speaker-j7mlx" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.687102 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6-metrics-certs\") pod \"speaker-j7mlx\" (UID: \"a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6\") " pod="metallb-system/speaker-j7mlx" Oct 11 05:03:45 crc kubenswrapper[4651]: E1011 05:03:45.687230 4651 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 11 05:03:45 crc kubenswrapper[4651]: E1011 05:03:45.687355 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6-memberlist podName:a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6 nodeName:}" failed. No retries permitted until 2025-10-11 05:03:46.687315187 +0000 UTC m=+747.583548074 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6-memberlist") pod "speaker-j7mlx" (UID: "a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6") : secret "metallb-memberlist" not found Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.698738 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6-metrics-certs\") pod \"speaker-j7mlx\" (UID: \"a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6\") " pod="metallb-system/speaker-j7mlx" Oct 11 05:03:45 crc kubenswrapper[4651]: I1011 05:03:45.707896 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-sxrwh"] Oct 11 05:03:45 crc kubenswrapper[4651]: W1011 05:03:45.716653 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4a2881f0_834f_4e9d_8be5_5adb1f5feefd.slice/crio-78be8b477e9de414ac8f4178cb6503a06a21cc39f9f66da1556d44650ff338dd WatchSource:0}: Error finding container 78be8b477e9de414ac8f4178cb6503a06a21cc39f9f66da1556d44650ff338dd: Status 404 returned error can't find the container with id 78be8b477e9de414ac8f4178cb6503a06a21cc39f9f66da1556d44650ff338dd Oct 11 05:03:46 crc kubenswrapper[4651]: I1011 05:03:46.165602 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-x9n5l" event={"ID":"870fb108-5a7a-4ade-82fd-8cf3c09950b8","Type":"ContainerStarted","Data":"da016ad24ef9584689240fddb06b34f8d7411f02ae143e2625c54797ca49de50"} Oct 11 05:03:46 crc kubenswrapper[4651]: I1011 05:03:46.167636 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-gbzvh" event={"ID":"b23b74fb-01aa-4027-978b-ef5fccb6a023","Type":"ContainerStarted","Data":"426903d1e520d3ca601f5b56127c910dec85e6980f827f7d89d5cbff245c65fa"} Oct 11 05:03:46 crc kubenswrapper[4651]: I1011 05:03:46.167680 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-gbzvh" event={"ID":"b23b74fb-01aa-4027-978b-ef5fccb6a023","Type":"ContainerStarted","Data":"bf1e62ae18160782bf837e233258bc0a57c0aaf46f6c65868ddd710ffc0c2223"} Oct 11 05:03:46 crc kubenswrapper[4651]: I1011 05:03:46.167691 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-gbzvh" event={"ID":"b23b74fb-01aa-4027-978b-ef5fccb6a023","Type":"ContainerStarted","Data":"b2f109c12662d6c5b15b038b51bd9f32541df6063f3ea7069e0c6e21f015ea27"} Oct 11 05:03:46 crc kubenswrapper[4651]: I1011 05:03:46.167868 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-68d546b9d8-gbzvh" Oct 11 05:03:46 crc kubenswrapper[4651]: I1011 05:03:46.168781 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-sxrwh" event={"ID":"4a2881f0-834f-4e9d-8be5-5adb1f5feefd","Type":"ContainerStarted","Data":"78be8b477e9de414ac8f4178cb6503a06a21cc39f9f66da1556d44650ff338dd"} Oct 11 05:03:46 crc kubenswrapper[4651]: I1011 05:03:46.190881 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-68d546b9d8-gbzvh" podStartSLOduration=1.190849448 podStartE2EDuration="1.190849448s" podCreationTimestamp="2025-10-11 05:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:03:46.187199146 
+0000 UTC m=+747.083431962" watchObservedRunningTime="2025-10-11 05:03:46.190849448 +0000 UTC m=+747.087082244" Oct 11 05:03:46 crc kubenswrapper[4651]: I1011 05:03:46.310306 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 05:03:46 crc kubenswrapper[4651]: I1011 05:03:46.310379 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 05:03:46 crc kubenswrapper[4651]: I1011 05:03:46.587114 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2hhkn"] Oct 11 05:03:46 crc kubenswrapper[4651]: I1011 05:03:46.587659 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-2hhkn" podUID="2797c45d-e1d7-44d7-b936-44048593f540" containerName="controller-manager" containerID="cri-o://7a26ece8fa97124105a03af84610f81b09c42277ba272cb10e79f7159dd4e72a" gracePeriod=30 Oct 11 05:03:46 crc kubenswrapper[4651]: I1011 05:03:46.690868 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-pkhrx"] Oct 11 05:03:46 crc kubenswrapper[4651]: I1011 05:03:46.691231 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pkhrx" podUID="27505683-e595-4855-8a29-aceee78542b6" containerName="route-controller-manager" containerID="cri-o://48ef3c3c050ae4f5d07a478caffe75b28d9ca82f6819742e41ae4b9210782c2e" gracePeriod=30 Oct 11 05:03:46 crc kubenswrapper[4651]: I1011 05:03:46.703925 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6-memberlist\") pod \"speaker-j7mlx\" (UID: \"a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6\") " pod="metallb-system/speaker-j7mlx" Oct 11 05:03:46 crc kubenswrapper[4651]: I1011 05:03:46.744398 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6-memberlist\") pod \"speaker-j7mlx\" (UID: \"a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6\") " pod="metallb-system/speaker-j7mlx" Oct 11 05:03:46 crc kubenswrapper[4651]: I1011 05:03:46.838805 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-j7mlx" Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.017581 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-2hhkn" Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.111949 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2797c45d-e1d7-44d7-b936-44048593f540-serving-cert\") pod \"2797c45d-e1d7-44d7-b936-44048593f540\" (UID: \"2797c45d-e1d7-44d7-b936-44048593f540\") " Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.111988 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2797c45d-e1d7-44d7-b936-44048593f540-client-ca\") pod \"2797c45d-e1d7-44d7-b936-44048593f540\" (UID: \"2797c45d-e1d7-44d7-b936-44048593f540\") " Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.112028 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2797c45d-e1d7-44d7-b936-44048593f540-proxy-ca-bundles\") pod \"2797c45d-e1d7-44d7-b936-44048593f540\" (UID: \"2797c45d-e1d7-44d7-b936-44048593f540\") " Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.112099 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2797c45d-e1d7-44d7-b936-44048593f540-config\") pod \"2797c45d-e1d7-44d7-b936-44048593f540\" (UID: \"2797c45d-e1d7-44d7-b936-44048593f540\") " Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.112131 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p5r9g\" (UniqueName: \"kubernetes.io/projected/2797c45d-e1d7-44d7-b936-44048593f540-kube-api-access-p5r9g\") pod \"2797c45d-e1d7-44d7-b936-44048593f540\" (UID: \"2797c45d-e1d7-44d7-b936-44048593f540\") " Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.114260 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2797c45d-e1d7-44d7-b936-44048593f540-config" (OuterVolumeSpecName: "config") pod "2797c45d-e1d7-44d7-b936-44048593f540" (UID: "2797c45d-e1d7-44d7-b936-44048593f540"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.114450 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2797c45d-e1d7-44d7-b936-44048593f540-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "2797c45d-e1d7-44d7-b936-44048593f540" (UID: "2797c45d-e1d7-44d7-b936-44048593f540"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.114898 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2797c45d-e1d7-44d7-b936-44048593f540-client-ca" (OuterVolumeSpecName: "client-ca") pod "2797c45d-e1d7-44d7-b936-44048593f540" (UID: "2797c45d-e1d7-44d7-b936-44048593f540"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.119599 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2797c45d-e1d7-44d7-b936-44048593f540-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "2797c45d-e1d7-44d7-b936-44048593f540" (UID: "2797c45d-e1d7-44d7-b936-44048593f540"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.123992 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2797c45d-e1d7-44d7-b936-44048593f540-kube-api-access-p5r9g" (OuterVolumeSpecName: "kube-api-access-p5r9g") pod "2797c45d-e1d7-44d7-b936-44048593f540" (UID: "2797c45d-e1d7-44d7-b936-44048593f540"). InnerVolumeSpecName "kube-api-access-p5r9g". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.150276 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pkhrx" Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.182979 4651 generic.go:334] "Generic (PLEG): container finished" podID="27505683-e595-4855-8a29-aceee78542b6" containerID="48ef3c3c050ae4f5d07a478caffe75b28d9ca82f6819742e41ae4b9210782c2e" exitCode=0 Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.183052 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pkhrx" Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.183040 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pkhrx" event={"ID":"27505683-e595-4855-8a29-aceee78542b6","Type":"ContainerDied","Data":"48ef3c3c050ae4f5d07a478caffe75b28d9ca82f6819742e41ae4b9210782c2e"} Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.183131 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-pkhrx" event={"ID":"27505683-e595-4855-8a29-aceee78542b6","Type":"ContainerDied","Data":"ad8f515568c1c63c51315eb3691dcc49eb9d576df74d6d7498aa8f5710f827e1"} Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.183157 4651 scope.go:117] "RemoveContainer" containerID="48ef3c3c050ae4f5d07a478caffe75b28d9ca82f6819742e41ae4b9210782c2e" Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.187490 4651 generic.go:334] "Generic (PLEG): container finished" podID="2797c45d-e1d7-44d7-b936-44048593f540" containerID="7a26ece8fa97124105a03af84610f81b09c42277ba272cb10e79f7159dd4e72a" exitCode=0 Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.187623 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-2hhkn" Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.187634 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-2hhkn" event={"ID":"2797c45d-e1d7-44d7-b936-44048593f540","Type":"ContainerDied","Data":"7a26ece8fa97124105a03af84610f81b09c42277ba272cb10e79f7159dd4e72a"} Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.187676 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-2hhkn" event={"ID":"2797c45d-e1d7-44d7-b936-44048593f540","Type":"ContainerDied","Data":"f0eb6fa8640c101b0068de5b4f8a8204955d3bbda560a4f1b048280bffe2774b"} Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.199929 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-j7mlx" event={"ID":"a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6","Type":"ContainerStarted","Data":"b6a00173279de198a7322b2772650355ecf5ad481bf1c1762707a13bbded74c5"} Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.200015 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-j7mlx" event={"ID":"a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6","Type":"ContainerStarted","Data":"f0b1f65f26adae94beaef369f2ed736b3584b36f8abc65c7d62e90b94d27f711"} Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.213400 4651 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2797c45d-e1d7-44d7-b936-44048593f540-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.213426 4651 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2797c45d-e1d7-44d7-b936-44048593f540-client-ca\") on node \"crc\" DevicePath \"\"" Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.213437 4651 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2797c45d-e1d7-44d7-b936-44048593f540-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.213448 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2797c45d-e1d7-44d7-b936-44048593f540-config\") on node \"crc\" DevicePath \"\"" Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.213459 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p5r9g\" (UniqueName: \"kubernetes.io/projected/2797c45d-e1d7-44d7-b936-44048593f540-kube-api-access-p5r9g\") on node \"crc\" DevicePath \"\"" Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.224728 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2hhkn"] Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.230429 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-2hhkn"] Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.233027 4651 scope.go:117] "RemoveContainer" containerID="48ef3c3c050ae4f5d07a478caffe75b28d9ca82f6819742e41ae4b9210782c2e" Oct 11 05:03:47 crc kubenswrapper[4651]: E1011 05:03:47.233442 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48ef3c3c050ae4f5d07a478caffe75b28d9ca82f6819742e41ae4b9210782c2e\": container with ID starting with 
48ef3c3c050ae4f5d07a478caffe75b28d9ca82f6819742e41ae4b9210782c2e not found: ID does not exist" containerID="48ef3c3c050ae4f5d07a478caffe75b28d9ca82f6819742e41ae4b9210782c2e" Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.233483 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48ef3c3c050ae4f5d07a478caffe75b28d9ca82f6819742e41ae4b9210782c2e"} err="failed to get container status \"48ef3c3c050ae4f5d07a478caffe75b28d9ca82f6819742e41ae4b9210782c2e\": rpc error: code = NotFound desc = could not find container \"48ef3c3c050ae4f5d07a478caffe75b28d9ca82f6819742e41ae4b9210782c2e\": container with ID starting with 48ef3c3c050ae4f5d07a478caffe75b28d9ca82f6819742e41ae4b9210782c2e not found: ID does not exist" Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.233505 4651 scope.go:117] "RemoveContainer" containerID="7a26ece8fa97124105a03af84610f81b09c42277ba272cb10e79f7159dd4e72a" Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.259081 4651 scope.go:117] "RemoveContainer" containerID="7a26ece8fa97124105a03af84610f81b09c42277ba272cb10e79f7159dd4e72a" Oct 11 05:03:47 crc kubenswrapper[4651]: E1011 05:03:47.262252 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a26ece8fa97124105a03af84610f81b09c42277ba272cb10e79f7159dd4e72a\": container with ID starting with 7a26ece8fa97124105a03af84610f81b09c42277ba272cb10e79f7159dd4e72a not found: ID does not exist" containerID="7a26ece8fa97124105a03af84610f81b09c42277ba272cb10e79f7159dd4e72a" Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.262297 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a26ece8fa97124105a03af84610f81b09c42277ba272cb10e79f7159dd4e72a"} err="failed to get container status \"7a26ece8fa97124105a03af84610f81b09c42277ba272cb10e79f7159dd4e72a\": rpc error: code = NotFound desc = could not find container \"7a26ece8fa97124105a03af84610f81b09c42277ba272cb10e79f7159dd4e72a\": container with ID starting with 7a26ece8fa97124105a03af84610f81b09c42277ba272cb10e79f7159dd4e72a not found: ID does not exist" Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.314879 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/27505683-e595-4855-8a29-aceee78542b6-client-ca\") pod \"27505683-e595-4855-8a29-aceee78542b6\" (UID: \"27505683-e595-4855-8a29-aceee78542b6\") " Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.314943 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8jnx4\" (UniqueName: \"kubernetes.io/projected/27505683-e595-4855-8a29-aceee78542b6-kube-api-access-8jnx4\") pod \"27505683-e595-4855-8a29-aceee78542b6\" (UID: \"27505683-e595-4855-8a29-aceee78542b6\") " Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.315063 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/27505683-e595-4855-8a29-aceee78542b6-serving-cert\") pod \"27505683-e595-4855-8a29-aceee78542b6\" (UID: \"27505683-e595-4855-8a29-aceee78542b6\") " Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.315095 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27505683-e595-4855-8a29-aceee78542b6-config\") pod \"27505683-e595-4855-8a29-aceee78542b6\" (UID: 
\"27505683-e595-4855-8a29-aceee78542b6\") " Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.316549 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27505683-e595-4855-8a29-aceee78542b6-client-ca" (OuterVolumeSpecName: "client-ca") pod "27505683-e595-4855-8a29-aceee78542b6" (UID: "27505683-e595-4855-8a29-aceee78542b6"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.316738 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27505683-e595-4855-8a29-aceee78542b6-config" (OuterVolumeSpecName: "config") pod "27505683-e595-4855-8a29-aceee78542b6" (UID: "27505683-e595-4855-8a29-aceee78542b6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.320617 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27505683-e595-4855-8a29-aceee78542b6-kube-api-access-8jnx4" (OuterVolumeSpecName: "kube-api-access-8jnx4") pod "27505683-e595-4855-8a29-aceee78542b6" (UID: "27505683-e595-4855-8a29-aceee78542b6"). InnerVolumeSpecName "kube-api-access-8jnx4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.333225 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27505683-e595-4855-8a29-aceee78542b6-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "27505683-e595-4855-8a29-aceee78542b6" (UID: "27505683-e595-4855-8a29-aceee78542b6"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.416787 4651 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/27505683-e595-4855-8a29-aceee78542b6-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.416858 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27505683-e595-4855-8a29-aceee78542b6-config\") on node \"crc\" DevicePath \"\"" Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.416871 4651 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/27505683-e595-4855-8a29-aceee78542b6-client-ca\") on node \"crc\" DevicePath \"\"" Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.416883 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8jnx4\" (UniqueName: \"kubernetes.io/projected/27505683-e595-4855-8a29-aceee78542b6-kube-api-access-8jnx4\") on node \"crc\" DevicePath \"\"" Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.514012 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-pkhrx"] Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.518754 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-pkhrx"] Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.877213 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27505683-e595-4855-8a29-aceee78542b6" path="/var/lib/kubelet/pods/27505683-e595-4855-8a29-aceee78542b6/volumes" Oct 11 05:03:47 crc kubenswrapper[4651]: I1011 05:03:47.877719 
4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2797c45d-e1d7-44d7-b936-44048593f540" path="/var/lib/kubelet/pods/2797c45d-e1d7-44d7-b936-44048593f540/volumes" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.209219 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-j7mlx" event={"ID":"a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6","Type":"ContainerStarted","Data":"a947b52ecc89d95c2de3c9fd138aa385841fbe27f34c75bd60e74b63c72a0802"} Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.209291 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-j7mlx" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.237465 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-j7mlx" podStartSLOduration=3.237443496 podStartE2EDuration="3.237443496s" podCreationTimestamp="2025-10-11 05:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:03:48.221788139 +0000 UTC m=+749.118020945" watchObservedRunningTime="2025-10-11 05:03:48.237443496 +0000 UTC m=+749.133676292" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.749326 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5bd7bbd845-tm7qg"] Oct 11 05:03:48 crc kubenswrapper[4651]: E1011 05:03:48.749654 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2797c45d-e1d7-44d7-b936-44048593f540" containerName="controller-manager" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.749673 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="2797c45d-e1d7-44d7-b936-44048593f540" containerName="controller-manager" Oct 11 05:03:48 crc kubenswrapper[4651]: E1011 05:03:48.749694 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27505683-e595-4855-8a29-aceee78542b6" containerName="route-controller-manager" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.749702 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="27505683-e595-4855-8a29-aceee78542b6" containerName="route-controller-manager" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.749814 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="27505683-e595-4855-8a29-aceee78542b6" containerName="route-controller-manager" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.749861 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="2797c45d-e1d7-44d7-b936-44048593f540" containerName="controller-manager" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.750347 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5bd7bbd845-tm7qg" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.752057 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.752063 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.752106 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.752385 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5c5bfd767-77rl5"] Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.753044 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5c5bfd767-77rl5" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.753089 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.755211 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.755406 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.755580 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.755720 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.756534 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.756636 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.756906 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.760844 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5c5bfd767-77rl5"] Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.761855 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.767673 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.772109 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5bd7bbd845-tm7qg"] Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.832606 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fp5hd\" (UniqueName: 
\"kubernetes.io/projected/77f1bab3-a5bb-42c0-b8ba-d20b688d6247-kube-api-access-fp5hd\") pod \"controller-manager-5bd7bbd845-tm7qg\" (UID: \"77f1bab3-a5bb-42c0-b8ba-d20b688d6247\") " pod="openshift-controller-manager/controller-manager-5bd7bbd845-tm7qg" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.832662 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frkwh\" (UniqueName: \"kubernetes.io/projected/167bd1f4-1446-4888-b0c6-fdc7827f6643-kube-api-access-frkwh\") pod \"route-controller-manager-5c5bfd767-77rl5\" (UID: \"167bd1f4-1446-4888-b0c6-fdc7827f6643\") " pod="openshift-route-controller-manager/route-controller-manager-5c5bfd767-77rl5" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.832690 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77f1bab3-a5bb-42c0-b8ba-d20b688d6247-config\") pod \"controller-manager-5bd7bbd845-tm7qg\" (UID: \"77f1bab3-a5bb-42c0-b8ba-d20b688d6247\") " pod="openshift-controller-manager/controller-manager-5bd7bbd845-tm7qg" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.832705 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/167bd1f4-1446-4888-b0c6-fdc7827f6643-client-ca\") pod \"route-controller-manager-5c5bfd767-77rl5\" (UID: \"167bd1f4-1446-4888-b0c6-fdc7827f6643\") " pod="openshift-route-controller-manager/route-controller-manager-5c5bfd767-77rl5" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.832722 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/77f1bab3-a5bb-42c0-b8ba-d20b688d6247-serving-cert\") pod \"controller-manager-5bd7bbd845-tm7qg\" (UID: \"77f1bab3-a5bb-42c0-b8ba-d20b688d6247\") " pod="openshift-controller-manager/controller-manager-5bd7bbd845-tm7qg" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.832744 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/77f1bab3-a5bb-42c0-b8ba-d20b688d6247-proxy-ca-bundles\") pod \"controller-manager-5bd7bbd845-tm7qg\" (UID: \"77f1bab3-a5bb-42c0-b8ba-d20b688d6247\") " pod="openshift-controller-manager/controller-manager-5bd7bbd845-tm7qg" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.832780 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/77f1bab3-a5bb-42c0-b8ba-d20b688d6247-client-ca\") pod \"controller-manager-5bd7bbd845-tm7qg\" (UID: \"77f1bab3-a5bb-42c0-b8ba-d20b688d6247\") " pod="openshift-controller-manager/controller-manager-5bd7bbd845-tm7qg" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.832797 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/167bd1f4-1446-4888-b0c6-fdc7827f6643-config\") pod \"route-controller-manager-5c5bfd767-77rl5\" (UID: \"167bd1f4-1446-4888-b0c6-fdc7827f6643\") " pod="openshift-route-controller-manager/route-controller-manager-5c5bfd767-77rl5" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.832813 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/167bd1f4-1446-4888-b0c6-fdc7827f6643-serving-cert\") pod \"route-controller-manager-5c5bfd767-77rl5\" (UID: \"167bd1f4-1446-4888-b0c6-fdc7827f6643\") " pod="openshift-route-controller-manager/route-controller-manager-5c5bfd767-77rl5" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.934252 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frkwh\" (UniqueName: \"kubernetes.io/projected/167bd1f4-1446-4888-b0c6-fdc7827f6643-kube-api-access-frkwh\") pod \"route-controller-manager-5c5bfd767-77rl5\" (UID: \"167bd1f4-1446-4888-b0c6-fdc7827f6643\") " pod="openshift-route-controller-manager/route-controller-manager-5c5bfd767-77rl5" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.934324 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/167bd1f4-1446-4888-b0c6-fdc7827f6643-client-ca\") pod \"route-controller-manager-5c5bfd767-77rl5\" (UID: \"167bd1f4-1446-4888-b0c6-fdc7827f6643\") " pod="openshift-route-controller-manager/route-controller-manager-5c5bfd767-77rl5" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.934346 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77f1bab3-a5bb-42c0-b8ba-d20b688d6247-config\") pod \"controller-manager-5bd7bbd845-tm7qg\" (UID: \"77f1bab3-a5bb-42c0-b8ba-d20b688d6247\") " pod="openshift-controller-manager/controller-manager-5bd7bbd845-tm7qg" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.934384 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/77f1bab3-a5bb-42c0-b8ba-d20b688d6247-serving-cert\") pod \"controller-manager-5bd7bbd845-tm7qg\" (UID: \"77f1bab3-a5bb-42c0-b8ba-d20b688d6247\") " pod="openshift-controller-manager/controller-manager-5bd7bbd845-tm7qg" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.935127 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/77f1bab3-a5bb-42c0-b8ba-d20b688d6247-proxy-ca-bundles\") pod \"controller-manager-5bd7bbd845-tm7qg\" (UID: \"77f1bab3-a5bb-42c0-b8ba-d20b688d6247\") " pod="openshift-controller-manager/controller-manager-5bd7bbd845-tm7qg" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.935163 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/167bd1f4-1446-4888-b0c6-fdc7827f6643-client-ca\") pod \"route-controller-manager-5c5bfd767-77rl5\" (UID: \"167bd1f4-1446-4888-b0c6-fdc7827f6643\") " pod="openshift-route-controller-manager/route-controller-manager-5c5bfd767-77rl5" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.935211 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/77f1bab3-a5bb-42c0-b8ba-d20b688d6247-client-ca\") pod \"controller-manager-5bd7bbd845-tm7qg\" (UID: \"77f1bab3-a5bb-42c0-b8ba-d20b688d6247\") " pod="openshift-controller-manager/controller-manager-5bd7bbd845-tm7qg" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.935237 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/167bd1f4-1446-4888-b0c6-fdc7827f6643-config\") pod \"route-controller-manager-5c5bfd767-77rl5\" (UID: \"167bd1f4-1446-4888-b0c6-fdc7827f6643\") 
" pod="openshift-route-controller-manager/route-controller-manager-5c5bfd767-77rl5" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.935258 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/167bd1f4-1446-4888-b0c6-fdc7827f6643-serving-cert\") pod \"route-controller-manager-5c5bfd767-77rl5\" (UID: \"167bd1f4-1446-4888-b0c6-fdc7827f6643\") " pod="openshift-route-controller-manager/route-controller-manager-5c5bfd767-77rl5" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.935930 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/77f1bab3-a5bb-42c0-b8ba-d20b688d6247-client-ca\") pod \"controller-manager-5bd7bbd845-tm7qg\" (UID: \"77f1bab3-a5bb-42c0-b8ba-d20b688d6247\") " pod="openshift-controller-manager/controller-manager-5bd7bbd845-tm7qg" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.936086 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fp5hd\" (UniqueName: \"kubernetes.io/projected/77f1bab3-a5bb-42c0-b8ba-d20b688d6247-kube-api-access-fp5hd\") pod \"controller-manager-5bd7bbd845-tm7qg\" (UID: \"77f1bab3-a5bb-42c0-b8ba-d20b688d6247\") " pod="openshift-controller-manager/controller-manager-5bd7bbd845-tm7qg" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.936324 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/77f1bab3-a5bb-42c0-b8ba-d20b688d6247-proxy-ca-bundles\") pod \"controller-manager-5bd7bbd845-tm7qg\" (UID: \"77f1bab3-a5bb-42c0-b8ba-d20b688d6247\") " pod="openshift-controller-manager/controller-manager-5bd7bbd845-tm7qg" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.936367 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/167bd1f4-1446-4888-b0c6-fdc7827f6643-config\") pod \"route-controller-manager-5c5bfd767-77rl5\" (UID: \"167bd1f4-1446-4888-b0c6-fdc7827f6643\") " pod="openshift-route-controller-manager/route-controller-manager-5c5bfd767-77rl5" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.938591 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77f1bab3-a5bb-42c0-b8ba-d20b688d6247-config\") pod \"controller-manager-5bd7bbd845-tm7qg\" (UID: \"77f1bab3-a5bb-42c0-b8ba-d20b688d6247\") " pod="openshift-controller-manager/controller-manager-5bd7bbd845-tm7qg" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.938861 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/77f1bab3-a5bb-42c0-b8ba-d20b688d6247-serving-cert\") pod \"controller-manager-5bd7bbd845-tm7qg\" (UID: \"77f1bab3-a5bb-42c0-b8ba-d20b688d6247\") " pod="openshift-controller-manager/controller-manager-5bd7bbd845-tm7qg" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.955328 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/167bd1f4-1446-4888-b0c6-fdc7827f6643-serving-cert\") pod \"route-controller-manager-5c5bfd767-77rl5\" (UID: \"167bd1f4-1446-4888-b0c6-fdc7827f6643\") " pod="openshift-route-controller-manager/route-controller-manager-5c5bfd767-77rl5" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.955484 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-frkwh\" (UniqueName: \"kubernetes.io/projected/167bd1f4-1446-4888-b0c6-fdc7827f6643-kube-api-access-frkwh\") pod \"route-controller-manager-5c5bfd767-77rl5\" (UID: \"167bd1f4-1446-4888-b0c6-fdc7827f6643\") " pod="openshift-route-controller-manager/route-controller-manager-5c5bfd767-77rl5" Oct 11 05:03:48 crc kubenswrapper[4651]: I1011 05:03:48.962126 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fp5hd\" (UniqueName: \"kubernetes.io/projected/77f1bab3-a5bb-42c0-b8ba-d20b688d6247-kube-api-access-fp5hd\") pod \"controller-manager-5bd7bbd845-tm7qg\" (UID: \"77f1bab3-a5bb-42c0-b8ba-d20b688d6247\") " pod="openshift-controller-manager/controller-manager-5bd7bbd845-tm7qg" Oct 11 05:03:49 crc kubenswrapper[4651]: I1011 05:03:49.074222 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5bd7bbd845-tm7qg" Oct 11 05:03:49 crc kubenswrapper[4651]: I1011 05:03:49.080010 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5c5bfd767-77rl5" Oct 11 05:03:49 crc kubenswrapper[4651]: I1011 05:03:49.466432 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5c5bfd767-77rl5"] Oct 11 05:03:49 crc kubenswrapper[4651]: W1011 05:03:49.481033 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod167bd1f4_1446_4888_b0c6_fdc7827f6643.slice/crio-c728effe652b0fa7f00c72272bef3362f2653d4521ec506b57f9f35bba30a3c1 WatchSource:0}: Error finding container c728effe652b0fa7f00c72272bef3362f2653d4521ec506b57f9f35bba30a3c1: Status 404 returned error can't find the container with id c728effe652b0fa7f00c72272bef3362f2653d4521ec506b57f9f35bba30a3c1 Oct 11 05:03:49 crc kubenswrapper[4651]: I1011 05:03:49.509949 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5bd7bbd845-tm7qg"] Oct 11 05:03:50 crc kubenswrapper[4651]: I1011 05:03:50.279751 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5c5bfd767-77rl5" event={"ID":"167bd1f4-1446-4888-b0c6-fdc7827f6643","Type":"ContainerStarted","Data":"75f7eaba2ba71c7d304d733481273afd62e8d8bfb67fe13c39e5108ae5c087a5"} Oct 11 05:03:50 crc kubenswrapper[4651]: I1011 05:03:50.280209 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5c5bfd767-77rl5" event={"ID":"167bd1f4-1446-4888-b0c6-fdc7827f6643","Type":"ContainerStarted","Data":"c728effe652b0fa7f00c72272bef3362f2653d4521ec506b57f9f35bba30a3c1"} Oct 11 05:03:50 crc kubenswrapper[4651]: I1011 05:03:50.280244 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5c5bfd767-77rl5" Oct 11 05:03:50 crc kubenswrapper[4651]: I1011 05:03:50.284346 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5bd7bbd845-tm7qg" event={"ID":"77f1bab3-a5bb-42c0-b8ba-d20b688d6247","Type":"ContainerStarted","Data":"2ad38d09ac2ec3198a68cbfe923f63cfb9929f2ed955402397347dc2f19c9ead"} Oct 11 05:03:50 crc kubenswrapper[4651]: I1011 05:03:50.284390 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5bd7bbd845-tm7qg" 
event={"ID":"77f1bab3-a5bb-42c0-b8ba-d20b688d6247","Type":"ContainerStarted","Data":"8e52e409c89bee8429adabab59545c620170d97538c3e691a3eabc5998f938cb"} Oct 11 05:03:50 crc kubenswrapper[4651]: I1011 05:03:50.284871 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5bd7bbd845-tm7qg" Oct 11 05:03:50 crc kubenswrapper[4651]: I1011 05:03:50.293307 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5bd7bbd845-tm7qg" Oct 11 05:03:50 crc kubenswrapper[4651]: I1011 05:03:50.304010 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5c5bfd767-77rl5" Oct 11 05:03:50 crc kubenswrapper[4651]: I1011 05:03:50.306729 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5c5bfd767-77rl5" podStartSLOduration=4.306701408 podStartE2EDuration="4.306701408s" podCreationTimestamp="2025-10-11 05:03:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:03:50.303204469 +0000 UTC m=+751.199437265" watchObservedRunningTime="2025-10-11 05:03:50.306701408 +0000 UTC m=+751.202934204" Oct 11 05:03:50 crc kubenswrapper[4651]: I1011 05:03:50.327772 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5bd7bbd845-tm7qg" podStartSLOduration=4.327750322 podStartE2EDuration="4.327750322s" podCreationTimestamp="2025-10-11 05:03:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:03:50.323309339 +0000 UTC m=+751.219542135" watchObservedRunningTime="2025-10-11 05:03:50.327750322 +0000 UTC m=+751.223983118" Oct 11 05:03:53 crc kubenswrapper[4651]: I1011 05:03:53.379673 4651 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 11 05:03:54 crc kubenswrapper[4651]: I1011 05:03:54.310145 4651 generic.go:334] "Generic (PLEG): container finished" podID="870fb108-5a7a-4ade-82fd-8cf3c09950b8" containerID="bc4d351d73600c78ae082644ca443f7e95d3f627ec45ce4daf26783c0fb368ff" exitCode=0 Oct 11 05:03:54 crc kubenswrapper[4651]: I1011 05:03:54.310191 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-x9n5l" event={"ID":"870fb108-5a7a-4ade-82fd-8cf3c09950b8","Type":"ContainerDied","Data":"bc4d351d73600c78ae082644ca443f7e95d3f627ec45ce4daf26783c0fb368ff"} Oct 11 05:03:54 crc kubenswrapper[4651]: I1011 05:03:54.312039 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-sxrwh" event={"ID":"4a2881f0-834f-4e9d-8be5-5adb1f5feefd","Type":"ContainerStarted","Data":"0fe63b5bba3ca9a71b98342bcdb1cd83642a31144840f37aba3fb9c7649a3026"} Oct 11 05:03:54 crc kubenswrapper[4651]: I1011 05:03:54.312202 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-sxrwh" Oct 11 05:03:54 crc kubenswrapper[4651]: I1011 05:03:54.352381 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-sxrwh" podStartSLOduration=2.119249201 podStartE2EDuration="10.352362015s" podCreationTimestamp="2025-10-11 
05:03:44 +0000 UTC" firstStartedPulling="2025-10-11 05:03:45.720622253 +0000 UTC m=+746.616855049" lastFinishedPulling="2025-10-11 05:03:53.953735067 +0000 UTC m=+754.849967863" observedRunningTime="2025-10-11 05:03:54.35138579 +0000 UTC m=+755.247618616" watchObservedRunningTime="2025-10-11 05:03:54.352362015 +0000 UTC m=+755.248594811" Oct 11 05:03:55 crc kubenswrapper[4651]: I1011 05:03:55.320625 4651 generic.go:334] "Generic (PLEG): container finished" podID="870fb108-5a7a-4ade-82fd-8cf3c09950b8" containerID="dae1d48001b79e2c947ca93d4419a6c6254a434bde30cf4c0a1a4173d542b38a" exitCode=0 Oct 11 05:03:55 crc kubenswrapper[4651]: I1011 05:03:55.320840 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-x9n5l" event={"ID":"870fb108-5a7a-4ade-82fd-8cf3c09950b8","Type":"ContainerDied","Data":"dae1d48001b79e2c947ca93d4419a6c6254a434bde30cf4c0a1a4173d542b38a"} Oct 11 05:03:55 crc kubenswrapper[4651]: I1011 05:03:55.356901 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-68d546b9d8-gbzvh" Oct 11 05:03:56 crc kubenswrapper[4651]: I1011 05:03:56.328336 4651 generic.go:334] "Generic (PLEG): container finished" podID="870fb108-5a7a-4ade-82fd-8cf3c09950b8" containerID="fb9b88424c5f6d50e4dfe61a3476af06a375907f0da9a8d44cabd67599b824ea" exitCode=0 Oct 11 05:03:56 crc kubenswrapper[4651]: I1011 05:03:56.328482 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-x9n5l" event={"ID":"870fb108-5a7a-4ade-82fd-8cf3c09950b8","Type":"ContainerDied","Data":"fb9b88424c5f6d50e4dfe61a3476af06a375907f0da9a8d44cabd67599b824ea"} Oct 11 05:03:57 crc kubenswrapper[4651]: I1011 05:03:57.340684 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-x9n5l" event={"ID":"870fb108-5a7a-4ade-82fd-8cf3c09950b8","Type":"ContainerStarted","Data":"d946dd4e90ab1c772075d241b94b6a828b07ff131c0647b662c8769f06161ed0"} Oct 11 05:03:57 crc kubenswrapper[4651]: I1011 05:03:57.340722 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-x9n5l" event={"ID":"870fb108-5a7a-4ade-82fd-8cf3c09950b8","Type":"ContainerStarted","Data":"88db0bc83700ee26489d139f5c7d31413390802adc2fc0cd17ac4a93bd147841"} Oct 11 05:03:57 crc kubenswrapper[4651]: I1011 05:03:57.340732 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-x9n5l" event={"ID":"870fb108-5a7a-4ade-82fd-8cf3c09950b8","Type":"ContainerStarted","Data":"e31f8774e1d84fea0cac1c0fb881165d46177b328081367422753ddf555d84b5"} Oct 11 05:03:57 crc kubenswrapper[4651]: I1011 05:03:57.340741 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-x9n5l" event={"ID":"870fb108-5a7a-4ade-82fd-8cf3c09950b8","Type":"ContainerStarted","Data":"87830ad5f7f98d97d13c85acffeffd18703d0e60921d673017a1b1e6feb94680"} Oct 11 05:03:57 crc kubenswrapper[4651]: I1011 05:03:57.340749 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-x9n5l" event={"ID":"870fb108-5a7a-4ade-82fd-8cf3c09950b8","Type":"ContainerStarted","Data":"23f3df42bf31374d6ca435b294857524067693de880d7dd05f1baf04612dbae8"} Oct 11 05:03:57 crc kubenswrapper[4651]: I1011 05:03:57.340757 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-x9n5l" event={"ID":"870fb108-5a7a-4ade-82fd-8cf3c09950b8","Type":"ContainerStarted","Data":"ad511f6e36fa8edbe0434bcb357575e5153d0e72b56c6f92d4eb5e96fdfaa28b"} Oct 11 05:03:57 crc kubenswrapper[4651]: I1011 05:03:57.341073 4651 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-x9n5l" Oct 11 05:03:57 crc kubenswrapper[4651]: I1011 05:03:57.367712 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-x9n5l" podStartSLOduration=4.839931659 podStartE2EDuration="13.367692891s" podCreationTimestamp="2025-10-11 05:03:44 +0000 UTC" firstStartedPulling="2025-10-11 05:03:45.46399559 +0000 UTC m=+746.360228396" lastFinishedPulling="2025-10-11 05:03:53.991756822 +0000 UTC m=+754.887989628" observedRunningTime="2025-10-11 05:03:57.36491754 +0000 UTC m=+758.261150356" watchObservedRunningTime="2025-10-11 05:03:57.367692891 +0000 UTC m=+758.263925697" Oct 11 05:04:00 crc kubenswrapper[4651]: I1011 05:04:00.268101 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-x9n5l" Oct 11 05:04:00 crc kubenswrapper[4651]: I1011 05:04:00.305251 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-x9n5l" Oct 11 05:04:05 crc kubenswrapper[4651]: I1011 05:04:05.266004 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-sxrwh" Oct 11 05:04:06 crc kubenswrapper[4651]: I1011 05:04:06.842938 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-j7mlx" Oct 11 05:04:09 crc kubenswrapper[4651]: I1011 05:04:09.768871 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-d4rdk"] Oct 11 05:04:09 crc kubenswrapper[4651]: I1011 05:04:09.796573 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-d4rdk"] Oct 11 05:04:09 crc kubenswrapper[4651]: I1011 05:04:09.796737 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-d4rdk" Oct 11 05:04:09 crc kubenswrapper[4651]: I1011 05:04:09.800851 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Oct 11 05:04:09 crc kubenswrapper[4651]: I1011 05:04:09.800997 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Oct 11 05:04:09 crc kubenswrapper[4651]: I1011 05:04:09.888180 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zgnct\" (UniqueName: \"kubernetes.io/projected/c5f8d248-56ea-4a91-9297-01ce2426ecea-kube-api-access-zgnct\") pod \"openstack-operator-index-d4rdk\" (UID: \"c5f8d248-56ea-4a91-9297-01ce2426ecea\") " pod="openstack-operators/openstack-operator-index-d4rdk" Oct 11 05:04:09 crc kubenswrapper[4651]: I1011 05:04:09.989938 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zgnct\" (UniqueName: \"kubernetes.io/projected/c5f8d248-56ea-4a91-9297-01ce2426ecea-kube-api-access-zgnct\") pod \"openstack-operator-index-d4rdk\" (UID: \"c5f8d248-56ea-4a91-9297-01ce2426ecea\") " pod="openstack-operators/openstack-operator-index-d4rdk" Oct 11 05:04:10 crc kubenswrapper[4651]: I1011 05:04:10.012078 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zgnct\" (UniqueName: \"kubernetes.io/projected/c5f8d248-56ea-4a91-9297-01ce2426ecea-kube-api-access-zgnct\") pod \"openstack-operator-index-d4rdk\" (UID: \"c5f8d248-56ea-4a91-9297-01ce2426ecea\") " pod="openstack-operators/openstack-operator-index-d4rdk" Oct 11 05:04:10 crc kubenswrapper[4651]: I1011 05:04:10.152814 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-d4rdk" Oct 11 05:04:10 crc kubenswrapper[4651]: I1011 05:04:10.596524 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-d4rdk"] Oct 11 05:04:10 crc kubenswrapper[4651]: W1011 05:04:10.606490 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc5f8d248_56ea_4a91_9297_01ce2426ecea.slice/crio-00d41cff285b407719e36098ab4e0eacbc730b87cf5cce8bd8ac14ac461153f0 WatchSource:0}: Error finding container 00d41cff285b407719e36098ab4e0eacbc730b87cf5cce8bd8ac14ac461153f0: Status 404 returned error can't find the container with id 00d41cff285b407719e36098ab4e0eacbc730b87cf5cce8bd8ac14ac461153f0 Oct 11 05:04:11 crc kubenswrapper[4651]: I1011 05:04:11.445531 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-d4rdk" event={"ID":"c5f8d248-56ea-4a91-9297-01ce2426ecea","Type":"ContainerStarted","Data":"00d41cff285b407719e36098ab4e0eacbc730b87cf5cce8bd8ac14ac461153f0"} Oct 11 05:04:13 crc kubenswrapper[4651]: I1011 05:04:13.143675 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-d4rdk"] Oct 11 05:04:13 crc kubenswrapper[4651]: I1011 05:04:13.460609 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-d4rdk" event={"ID":"c5f8d248-56ea-4a91-9297-01ce2426ecea","Type":"ContainerStarted","Data":"40f5fd57e2d8c0fbb76c5db3e614a608a5a32248bbdfcc7398cdb9c0703d395b"} Oct 11 05:04:13 crc kubenswrapper[4651]: I1011 05:04:13.484021 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-d4rdk" podStartSLOduration=2.428130864 podStartE2EDuration="4.483990825s" podCreationTimestamp="2025-10-11 05:04:09 +0000 UTC" firstStartedPulling="2025-10-11 05:04:10.610938352 +0000 UTC m=+771.507171178" lastFinishedPulling="2025-10-11 05:04:12.666798313 +0000 UTC m=+773.563031139" observedRunningTime="2025-10-11 05:04:13.481304937 +0000 UTC m=+774.377537833" watchObservedRunningTime="2025-10-11 05:04:13.483990825 +0000 UTC m=+774.380223661" Oct 11 05:04:13 crc kubenswrapper[4651]: I1011 05:04:13.758635 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-q6f6m"] Oct 11 05:04:13 crc kubenswrapper[4651]: I1011 05:04:13.760025 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-q6f6m" Oct 11 05:04:13 crc kubenswrapper[4651]: I1011 05:04:13.763870 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-q6f6m"] Oct 11 05:04:13 crc kubenswrapper[4651]: I1011 05:04:13.765973 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-b962d" Oct 11 05:04:13 crc kubenswrapper[4651]: I1011 05:04:13.843198 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xk4vw\" (UniqueName: \"kubernetes.io/projected/69aec95d-9651-4e5f-9cb6-a6ca9d5093f5-kube-api-access-xk4vw\") pod \"openstack-operator-index-q6f6m\" (UID: \"69aec95d-9651-4e5f-9cb6-a6ca9d5093f5\") " pod="openstack-operators/openstack-operator-index-q6f6m" Oct 11 05:04:13 crc kubenswrapper[4651]: I1011 05:04:13.944938 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xk4vw\" (UniqueName: \"kubernetes.io/projected/69aec95d-9651-4e5f-9cb6-a6ca9d5093f5-kube-api-access-xk4vw\") pod \"openstack-operator-index-q6f6m\" (UID: \"69aec95d-9651-4e5f-9cb6-a6ca9d5093f5\") " pod="openstack-operators/openstack-operator-index-q6f6m" Oct 11 05:04:13 crc kubenswrapper[4651]: I1011 05:04:13.977470 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xk4vw\" (UniqueName: \"kubernetes.io/projected/69aec95d-9651-4e5f-9cb6-a6ca9d5093f5-kube-api-access-xk4vw\") pod \"openstack-operator-index-q6f6m\" (UID: \"69aec95d-9651-4e5f-9cb6-a6ca9d5093f5\") " pod="openstack-operators/openstack-operator-index-q6f6m" Oct 11 05:04:14 crc kubenswrapper[4651]: I1011 05:04:14.077725 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-q6f6m" Oct 11 05:04:14 crc kubenswrapper[4651]: I1011 05:04:14.467578 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-d4rdk" podUID="c5f8d248-56ea-4a91-9297-01ce2426ecea" containerName="registry-server" containerID="cri-o://40f5fd57e2d8c0fbb76c5db3e614a608a5a32248bbdfcc7398cdb9c0703d395b" gracePeriod=2 Oct 11 05:04:14 crc kubenswrapper[4651]: I1011 05:04:14.553713 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-q6f6m"] Oct 11 05:04:14 crc kubenswrapper[4651]: W1011 05:04:14.571454 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod69aec95d_9651_4e5f_9cb6_a6ca9d5093f5.slice/crio-050a03522c018ac92040911b6e795855aa624eb8c302304a2161e2dc09fed32d WatchSource:0}: Error finding container 050a03522c018ac92040911b6e795855aa624eb8c302304a2161e2dc09fed32d: Status 404 returned error can't find the container with id 050a03522c018ac92040911b6e795855aa624eb8c302304a2161e2dc09fed32d Oct 11 05:04:14 crc kubenswrapper[4651]: I1011 05:04:14.940296 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-d4rdk" Oct 11 05:04:14 crc kubenswrapper[4651]: I1011 05:04:14.972026 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgnct\" (UniqueName: \"kubernetes.io/projected/c5f8d248-56ea-4a91-9297-01ce2426ecea-kube-api-access-zgnct\") pod \"c5f8d248-56ea-4a91-9297-01ce2426ecea\" (UID: \"c5f8d248-56ea-4a91-9297-01ce2426ecea\") " Oct 11 05:04:14 crc kubenswrapper[4651]: I1011 05:04:14.977713 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5f8d248-56ea-4a91-9297-01ce2426ecea-kube-api-access-zgnct" (OuterVolumeSpecName: "kube-api-access-zgnct") pod "c5f8d248-56ea-4a91-9297-01ce2426ecea" (UID: "c5f8d248-56ea-4a91-9297-01ce2426ecea"). InnerVolumeSpecName "kube-api-access-zgnct". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:04:15 crc kubenswrapper[4651]: I1011 05:04:15.073805 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgnct\" (UniqueName: \"kubernetes.io/projected/c5f8d248-56ea-4a91-9297-01ce2426ecea-kube-api-access-zgnct\") on node \"crc\" DevicePath \"\"" Oct 11 05:04:15 crc kubenswrapper[4651]: I1011 05:04:15.272926 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-x9n5l" Oct 11 05:04:15 crc kubenswrapper[4651]: I1011 05:04:15.478236 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-q6f6m" event={"ID":"69aec95d-9651-4e5f-9cb6-a6ca9d5093f5","Type":"ContainerStarted","Data":"3a74581335e3368bc2d842adb8fb4ac6cd8daa67d93acfd00162cbac7155e8cd"} Oct 11 05:04:15 crc kubenswrapper[4651]: I1011 05:04:15.478323 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-q6f6m" event={"ID":"69aec95d-9651-4e5f-9cb6-a6ca9d5093f5","Type":"ContainerStarted","Data":"050a03522c018ac92040911b6e795855aa624eb8c302304a2161e2dc09fed32d"} Oct 11 05:04:15 crc kubenswrapper[4651]: I1011 05:04:15.480594 4651 generic.go:334] "Generic (PLEG): container finished" podID="c5f8d248-56ea-4a91-9297-01ce2426ecea" containerID="40f5fd57e2d8c0fbb76c5db3e614a608a5a32248bbdfcc7398cdb9c0703d395b" exitCode=0 Oct 11 05:04:15 crc kubenswrapper[4651]: I1011 05:04:15.480648 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-d4rdk" event={"ID":"c5f8d248-56ea-4a91-9297-01ce2426ecea","Type":"ContainerDied","Data":"40f5fd57e2d8c0fbb76c5db3e614a608a5a32248bbdfcc7398cdb9c0703d395b"} Oct 11 05:04:15 crc kubenswrapper[4651]: I1011 05:04:15.480664 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-d4rdk" Oct 11 05:04:15 crc kubenswrapper[4651]: I1011 05:04:15.480686 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-d4rdk" event={"ID":"c5f8d248-56ea-4a91-9297-01ce2426ecea","Type":"ContainerDied","Data":"00d41cff285b407719e36098ab4e0eacbc730b87cf5cce8bd8ac14ac461153f0"} Oct 11 05:04:15 crc kubenswrapper[4651]: I1011 05:04:15.480727 4651 scope.go:117] "RemoveContainer" containerID="40f5fd57e2d8c0fbb76c5db3e614a608a5a32248bbdfcc7398cdb9c0703d395b" Oct 11 05:04:15 crc kubenswrapper[4651]: I1011 05:04:15.498763 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-q6f6m" podStartSLOduration=2.436109865 podStartE2EDuration="2.498740824s" podCreationTimestamp="2025-10-11 05:04:13 +0000 UTC" firstStartedPulling="2025-10-11 05:04:14.576997338 +0000 UTC m=+775.473230134" lastFinishedPulling="2025-10-11 05:04:14.639628287 +0000 UTC m=+775.535861093" observedRunningTime="2025-10-11 05:04:15.493204783 +0000 UTC m=+776.389437629" watchObservedRunningTime="2025-10-11 05:04:15.498740824 +0000 UTC m=+776.394973630" Oct 11 05:04:15 crc kubenswrapper[4651]: I1011 05:04:15.508592 4651 scope.go:117] "RemoveContainer" containerID="40f5fd57e2d8c0fbb76c5db3e614a608a5a32248bbdfcc7398cdb9c0703d395b" Oct 11 05:04:15 crc kubenswrapper[4651]: E1011 05:04:15.509004 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"40f5fd57e2d8c0fbb76c5db3e614a608a5a32248bbdfcc7398cdb9c0703d395b\": container with ID starting with 40f5fd57e2d8c0fbb76c5db3e614a608a5a32248bbdfcc7398cdb9c0703d395b not found: ID does not exist" containerID="40f5fd57e2d8c0fbb76c5db3e614a608a5a32248bbdfcc7398cdb9c0703d395b" Oct 11 05:04:15 crc kubenswrapper[4651]: I1011 05:04:15.509031 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40f5fd57e2d8c0fbb76c5db3e614a608a5a32248bbdfcc7398cdb9c0703d395b"} err="failed to get container status \"40f5fd57e2d8c0fbb76c5db3e614a608a5a32248bbdfcc7398cdb9c0703d395b\": rpc error: code = NotFound desc = could not find container \"40f5fd57e2d8c0fbb76c5db3e614a608a5a32248bbdfcc7398cdb9c0703d395b\": container with ID starting with 40f5fd57e2d8c0fbb76c5db3e614a608a5a32248bbdfcc7398cdb9c0703d395b not found: ID does not exist" Oct 11 05:04:15 crc kubenswrapper[4651]: I1011 05:04:15.523221 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-d4rdk"] Oct 11 05:04:15 crc kubenswrapper[4651]: I1011 05:04:15.527596 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-d4rdk"] Oct 11 05:04:15 crc kubenswrapper[4651]: I1011 05:04:15.885546 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5f8d248-56ea-4a91-9297-01ce2426ecea" path="/var/lib/kubelet/pods/c5f8d248-56ea-4a91-9297-01ce2426ecea/volumes" Oct 11 05:04:16 crc kubenswrapper[4651]: I1011 05:04:16.310205 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 05:04:16 crc kubenswrapper[4651]: I1011 05:04:16.310277 4651 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 05:04:16 crc kubenswrapper[4651]: I1011 05:04:16.310352 4651 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" Oct 11 05:04:16 crc kubenswrapper[4651]: I1011 05:04:16.311107 4651 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f3d31fd3172b3e1939d18cb8fc4eb85b3b6d1b1c4f71fa7a9aed3462d80c8443"} pod="openshift-machine-config-operator/machine-config-daemon-78jnv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 11 05:04:16 crc kubenswrapper[4651]: I1011 05:04:16.311210 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" containerID="cri-o://f3d31fd3172b3e1939d18cb8fc4eb85b3b6d1b1c4f71fa7a9aed3462d80c8443" gracePeriod=600 Oct 11 05:04:16 crc kubenswrapper[4651]: I1011 05:04:16.498178 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" event={"ID":"519a1ae1-e964-48b0-8b61-835146df28c1","Type":"ContainerDied","Data":"f3d31fd3172b3e1939d18cb8fc4eb85b3b6d1b1c4f71fa7a9aed3462d80c8443"} Oct 11 05:04:16 crc kubenswrapper[4651]: I1011 05:04:16.498761 4651 scope.go:117] "RemoveContainer" containerID="4c4da425137942b26402e87d800a133130761ddfde8b5ad1911ac803f8d0758d" Oct 11 05:04:16 crc kubenswrapper[4651]: I1011 05:04:16.498102 4651 generic.go:334] "Generic (PLEG): container finished" podID="519a1ae1-e964-48b0-8b61-835146df28c1" containerID="f3d31fd3172b3e1939d18cb8fc4eb85b3b6d1b1c4f71fa7a9aed3462d80c8443" exitCode=0 Oct 11 05:04:17 crc kubenswrapper[4651]: I1011 05:04:17.511955 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" event={"ID":"519a1ae1-e964-48b0-8b61-835146df28c1","Type":"ContainerStarted","Data":"bbcfc5308211a05ce73ce546a00d78ee49c4d35fa44427537e93a8a405fe9270"} Oct 11 05:04:24 crc kubenswrapper[4651]: I1011 05:04:24.078556 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-q6f6m" Oct 11 05:04:24 crc kubenswrapper[4651]: I1011 05:04:24.079904 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-q6f6m" Oct 11 05:04:24 crc kubenswrapper[4651]: I1011 05:04:24.123676 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-q6f6m" Oct 11 05:04:24 crc kubenswrapper[4651]: I1011 05:04:24.610531 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-q6f6m" Oct 11 05:04:25 crc kubenswrapper[4651]: I1011 05:04:25.962540 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pr2mb"] Oct 11 05:04:25 crc kubenswrapper[4651]: E1011 05:04:25.963032 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5f8d248-56ea-4a91-9297-01ce2426ecea" 
containerName="registry-server" Oct 11 05:04:25 crc kubenswrapper[4651]: I1011 05:04:25.963058 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5f8d248-56ea-4a91-9297-01ce2426ecea" containerName="registry-server" Oct 11 05:04:25 crc kubenswrapper[4651]: I1011 05:04:25.963271 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5f8d248-56ea-4a91-9297-01ce2426ecea" containerName="registry-server" Oct 11 05:04:25 crc kubenswrapper[4651]: I1011 05:04:25.967460 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pr2mb" Oct 11 05:04:25 crc kubenswrapper[4651]: I1011 05:04:25.980352 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pr2mb"] Oct 11 05:04:26 crc kubenswrapper[4651]: I1011 05:04:26.150991 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b18db9ad-7e2e-41c7-815d-603953e750fb-utilities\") pod \"redhat-marketplace-pr2mb\" (UID: \"b18db9ad-7e2e-41c7-815d-603953e750fb\") " pod="openshift-marketplace/redhat-marketplace-pr2mb" Oct 11 05:04:26 crc kubenswrapper[4651]: I1011 05:04:26.151063 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lnl5c\" (UniqueName: \"kubernetes.io/projected/b18db9ad-7e2e-41c7-815d-603953e750fb-kube-api-access-lnl5c\") pod \"redhat-marketplace-pr2mb\" (UID: \"b18db9ad-7e2e-41c7-815d-603953e750fb\") " pod="openshift-marketplace/redhat-marketplace-pr2mb" Oct 11 05:04:26 crc kubenswrapper[4651]: I1011 05:04:26.151118 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b18db9ad-7e2e-41c7-815d-603953e750fb-catalog-content\") pod \"redhat-marketplace-pr2mb\" (UID: \"b18db9ad-7e2e-41c7-815d-603953e750fb\") " pod="openshift-marketplace/redhat-marketplace-pr2mb" Oct 11 05:04:26 crc kubenswrapper[4651]: I1011 05:04:26.252438 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b18db9ad-7e2e-41c7-815d-603953e750fb-catalog-content\") pod \"redhat-marketplace-pr2mb\" (UID: \"b18db9ad-7e2e-41c7-815d-603953e750fb\") " pod="openshift-marketplace/redhat-marketplace-pr2mb" Oct 11 05:04:26 crc kubenswrapper[4651]: I1011 05:04:26.252534 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b18db9ad-7e2e-41c7-815d-603953e750fb-utilities\") pod \"redhat-marketplace-pr2mb\" (UID: \"b18db9ad-7e2e-41c7-815d-603953e750fb\") " pod="openshift-marketplace/redhat-marketplace-pr2mb" Oct 11 05:04:26 crc kubenswrapper[4651]: I1011 05:04:26.252634 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lnl5c\" (UniqueName: \"kubernetes.io/projected/b18db9ad-7e2e-41c7-815d-603953e750fb-kube-api-access-lnl5c\") pod \"redhat-marketplace-pr2mb\" (UID: \"b18db9ad-7e2e-41c7-815d-603953e750fb\") " pod="openshift-marketplace/redhat-marketplace-pr2mb" Oct 11 05:04:26 crc kubenswrapper[4651]: I1011 05:04:26.253036 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b18db9ad-7e2e-41c7-815d-603953e750fb-catalog-content\") pod \"redhat-marketplace-pr2mb\" (UID: \"b18db9ad-7e2e-41c7-815d-603953e750fb\") " 
pod="openshift-marketplace/redhat-marketplace-pr2mb" Oct 11 05:04:26 crc kubenswrapper[4651]: I1011 05:04:26.253054 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b18db9ad-7e2e-41c7-815d-603953e750fb-utilities\") pod \"redhat-marketplace-pr2mb\" (UID: \"b18db9ad-7e2e-41c7-815d-603953e750fb\") " pod="openshift-marketplace/redhat-marketplace-pr2mb" Oct 11 05:04:26 crc kubenswrapper[4651]: I1011 05:04:26.291951 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lnl5c\" (UniqueName: \"kubernetes.io/projected/b18db9ad-7e2e-41c7-815d-603953e750fb-kube-api-access-lnl5c\") pod \"redhat-marketplace-pr2mb\" (UID: \"b18db9ad-7e2e-41c7-815d-603953e750fb\") " pod="openshift-marketplace/redhat-marketplace-pr2mb" Oct 11 05:04:26 crc kubenswrapper[4651]: I1011 05:04:26.584366 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pr2mb" Oct 11 05:04:26 crc kubenswrapper[4651]: I1011 05:04:26.989995 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pr2mb"] Oct 11 05:04:27 crc kubenswrapper[4651]: I1011 05:04:27.606951 4651 generic.go:334] "Generic (PLEG): container finished" podID="b18db9ad-7e2e-41c7-815d-603953e750fb" containerID="358f18d0970fcb42b313816d5c3f8511ad32e13c94690a7fc926ec4f8e3b70bb" exitCode=0 Oct 11 05:04:27 crc kubenswrapper[4651]: I1011 05:04:27.607044 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pr2mb" event={"ID":"b18db9ad-7e2e-41c7-815d-603953e750fb","Type":"ContainerDied","Data":"358f18d0970fcb42b313816d5c3f8511ad32e13c94690a7fc926ec4f8e3b70bb"} Oct 11 05:04:27 crc kubenswrapper[4651]: I1011 05:04:27.607291 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pr2mb" event={"ID":"b18db9ad-7e2e-41c7-815d-603953e750fb","Type":"ContainerStarted","Data":"624b05590dbea06aea0b7ae0ecd15d57a0308c4d5db6ba9e1b678367fd4c725f"} Oct 11 05:04:28 crc kubenswrapper[4651]: I1011 05:04:28.617668 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pr2mb" event={"ID":"b18db9ad-7e2e-41c7-815d-603953e750fb","Type":"ContainerStarted","Data":"a95d5a8527a87c48687d5ad008f77e18133c3ae52126a970cada35ee8f3ea450"} Oct 11 05:04:29 crc kubenswrapper[4651]: I1011 05:04:29.626953 4651 generic.go:334] "Generic (PLEG): container finished" podID="b18db9ad-7e2e-41c7-815d-603953e750fb" containerID="a95d5a8527a87c48687d5ad008f77e18133c3ae52126a970cada35ee8f3ea450" exitCode=0 Oct 11 05:04:29 crc kubenswrapper[4651]: I1011 05:04:29.627008 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pr2mb" event={"ID":"b18db9ad-7e2e-41c7-815d-603953e750fb","Type":"ContainerDied","Data":"a95d5a8527a87c48687d5ad008f77e18133c3ae52126a970cada35ee8f3ea450"} Oct 11 05:04:30 crc kubenswrapper[4651]: I1011 05:04:30.636556 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pr2mb" event={"ID":"b18db9ad-7e2e-41c7-815d-603953e750fb","Type":"ContainerStarted","Data":"22daaf07d85ba9e6bdd4c6d2485ec771a26b0dbc6ebac27ebfe5dd6c3a64ab9d"} Oct 11 05:04:30 crc kubenswrapper[4651]: I1011 05:04:30.677985 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pr2mb" podStartSLOduration=3.2546402150000002 
podStartE2EDuration="5.677957104s" podCreationTimestamp="2025-10-11 05:04:25 +0000 UTC" firstStartedPulling="2025-10-11 05:04:27.608479604 +0000 UTC m=+788.504712440" lastFinishedPulling="2025-10-11 05:04:30.031796533 +0000 UTC m=+790.928029329" observedRunningTime="2025-10-11 05:04:30.664431941 +0000 UTC m=+791.560664747" watchObservedRunningTime="2025-10-11 05:04:30.677957104 +0000 UTC m=+791.574189920" Oct 11 05:04:31 crc kubenswrapper[4651]: I1011 05:04:31.998620 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4"] Oct 11 05:04:31 crc kubenswrapper[4651]: I1011 05:04:31.999774 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4" Oct 11 05:04:32 crc kubenswrapper[4651]: I1011 05:04:32.001866 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-hh4jw" Oct 11 05:04:32 crc kubenswrapper[4651]: I1011 05:04:32.015953 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4"] Oct 11 05:04:32 crc kubenswrapper[4651]: I1011 05:04:32.131170 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvpkd\" (UniqueName: \"kubernetes.io/projected/7f8c3a2b-8cc3-4266-bdb2-e896769c8da4-kube-api-access-nvpkd\") pod \"4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4\" (UID: \"7f8c3a2b-8cc3-4266-bdb2-e896769c8da4\") " pod="openstack-operators/4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4" Oct 11 05:04:32 crc kubenswrapper[4651]: I1011 05:04:32.131512 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7f8c3a2b-8cc3-4266-bdb2-e896769c8da4-bundle\") pod \"4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4\" (UID: \"7f8c3a2b-8cc3-4266-bdb2-e896769c8da4\") " pod="openstack-operators/4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4" Oct 11 05:04:32 crc kubenswrapper[4651]: I1011 05:04:32.131642 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7f8c3a2b-8cc3-4266-bdb2-e896769c8da4-util\") pod \"4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4\" (UID: \"7f8c3a2b-8cc3-4266-bdb2-e896769c8da4\") " pod="openstack-operators/4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4" Oct 11 05:04:32 crc kubenswrapper[4651]: I1011 05:04:32.232654 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvpkd\" (UniqueName: \"kubernetes.io/projected/7f8c3a2b-8cc3-4266-bdb2-e896769c8da4-kube-api-access-nvpkd\") pod \"4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4\" (UID: \"7f8c3a2b-8cc3-4266-bdb2-e896769c8da4\") " pod="openstack-operators/4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4" Oct 11 05:04:32 crc kubenswrapper[4651]: I1011 05:04:32.232774 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7f8c3a2b-8cc3-4266-bdb2-e896769c8da4-bundle\") pod \"4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4\" (UID: \"7f8c3a2b-8cc3-4266-bdb2-e896769c8da4\") " 
pod="openstack-operators/4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4" Oct 11 05:04:32 crc kubenswrapper[4651]: I1011 05:04:32.232799 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7f8c3a2b-8cc3-4266-bdb2-e896769c8da4-util\") pod \"4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4\" (UID: \"7f8c3a2b-8cc3-4266-bdb2-e896769c8da4\") " pod="openstack-operators/4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4" Oct 11 05:04:32 crc kubenswrapper[4651]: I1011 05:04:32.233382 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7f8c3a2b-8cc3-4266-bdb2-e896769c8da4-util\") pod \"4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4\" (UID: \"7f8c3a2b-8cc3-4266-bdb2-e896769c8da4\") " pod="openstack-operators/4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4" Oct 11 05:04:32 crc kubenswrapper[4651]: I1011 05:04:32.233653 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7f8c3a2b-8cc3-4266-bdb2-e896769c8da4-bundle\") pod \"4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4\" (UID: \"7f8c3a2b-8cc3-4266-bdb2-e896769c8da4\") " pod="openstack-operators/4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4" Oct 11 05:04:32 crc kubenswrapper[4651]: I1011 05:04:32.256956 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvpkd\" (UniqueName: \"kubernetes.io/projected/7f8c3a2b-8cc3-4266-bdb2-e896769c8da4-kube-api-access-nvpkd\") pod \"4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4\" (UID: \"7f8c3a2b-8cc3-4266-bdb2-e896769c8da4\") " pod="openstack-operators/4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4" Oct 11 05:04:32 crc kubenswrapper[4651]: I1011 05:04:32.318061 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4" Oct 11 05:04:32 crc kubenswrapper[4651]: I1011 05:04:32.760094 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4"] Oct 11 05:04:32 crc kubenswrapper[4651]: W1011 05:04:32.762284 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7f8c3a2b_8cc3_4266_bdb2_e896769c8da4.slice/crio-4602e5a4a5c55c3c2ef1bf75e6c62cacffe829390bb9bdef8b181b44eebef8a1 WatchSource:0}: Error finding container 4602e5a4a5c55c3c2ef1bf75e6c62cacffe829390bb9bdef8b181b44eebef8a1: Status 404 returned error can't find the container with id 4602e5a4a5c55c3c2ef1bf75e6c62cacffe829390bb9bdef8b181b44eebef8a1 Oct 11 05:04:33 crc kubenswrapper[4651]: I1011 05:04:33.666112 4651 generic.go:334] "Generic (PLEG): container finished" podID="7f8c3a2b-8cc3-4266-bdb2-e896769c8da4" containerID="59af9d87164bbb18439870ddd73b5c0844eb96cdefd6ad4715153386e11237d5" exitCode=0 Oct 11 05:04:33 crc kubenswrapper[4651]: I1011 05:04:33.666186 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4" event={"ID":"7f8c3a2b-8cc3-4266-bdb2-e896769c8da4","Type":"ContainerDied","Data":"59af9d87164bbb18439870ddd73b5c0844eb96cdefd6ad4715153386e11237d5"} Oct 11 05:04:33 crc kubenswrapper[4651]: I1011 05:04:33.666266 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4" event={"ID":"7f8c3a2b-8cc3-4266-bdb2-e896769c8da4","Type":"ContainerStarted","Data":"4602e5a4a5c55c3c2ef1bf75e6c62cacffe829390bb9bdef8b181b44eebef8a1"} Oct 11 05:04:34 crc kubenswrapper[4651]: I1011 05:04:34.683500 4651 generic.go:334] "Generic (PLEG): container finished" podID="7f8c3a2b-8cc3-4266-bdb2-e896769c8da4" containerID="372d58b67f473a01eaeff24006e07d0e3b76824770ee3d70e714e4a9564a52a4" exitCode=0 Oct 11 05:04:34 crc kubenswrapper[4651]: I1011 05:04:34.683580 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4" event={"ID":"7f8c3a2b-8cc3-4266-bdb2-e896769c8da4","Type":"ContainerDied","Data":"372d58b67f473a01eaeff24006e07d0e3b76824770ee3d70e714e4a9564a52a4"} Oct 11 05:04:35 crc kubenswrapper[4651]: I1011 05:04:35.693476 4651 generic.go:334] "Generic (PLEG): container finished" podID="7f8c3a2b-8cc3-4266-bdb2-e896769c8da4" containerID="c685abb37de4d3cd534093b64f79fbed597ee714969288ae15eaa72e32beacf0" exitCode=0 Oct 11 05:04:35 crc kubenswrapper[4651]: I1011 05:04:35.693543 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4" event={"ID":"7f8c3a2b-8cc3-4266-bdb2-e896769c8da4","Type":"ContainerDied","Data":"c685abb37de4d3cd534093b64f79fbed597ee714969288ae15eaa72e32beacf0"} Oct 11 05:04:36 crc kubenswrapper[4651]: I1011 05:04:36.585038 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pr2mb" Oct 11 05:04:36 crc kubenswrapper[4651]: I1011 05:04:36.585101 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pr2mb" Oct 11 05:04:36 crc kubenswrapper[4651]: I1011 05:04:36.656228 4651 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pr2mb" Oct 11 05:04:36 crc kubenswrapper[4651]: I1011 05:04:36.776265 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pr2mb" Oct 11 05:04:37 crc kubenswrapper[4651]: I1011 05:04:37.093525 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4" Oct 11 05:04:37 crc kubenswrapper[4651]: I1011 05:04:37.205311 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7f8c3a2b-8cc3-4266-bdb2-e896769c8da4-bundle\") pod \"7f8c3a2b-8cc3-4266-bdb2-e896769c8da4\" (UID: \"7f8c3a2b-8cc3-4266-bdb2-e896769c8da4\") " Oct 11 05:04:37 crc kubenswrapper[4651]: I1011 05:04:37.205371 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nvpkd\" (UniqueName: \"kubernetes.io/projected/7f8c3a2b-8cc3-4266-bdb2-e896769c8da4-kube-api-access-nvpkd\") pod \"7f8c3a2b-8cc3-4266-bdb2-e896769c8da4\" (UID: \"7f8c3a2b-8cc3-4266-bdb2-e896769c8da4\") " Oct 11 05:04:37 crc kubenswrapper[4651]: I1011 05:04:37.205473 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7f8c3a2b-8cc3-4266-bdb2-e896769c8da4-util\") pod \"7f8c3a2b-8cc3-4266-bdb2-e896769c8da4\" (UID: \"7f8c3a2b-8cc3-4266-bdb2-e896769c8da4\") " Oct 11 05:04:37 crc kubenswrapper[4651]: I1011 05:04:37.206805 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7f8c3a2b-8cc3-4266-bdb2-e896769c8da4-bundle" (OuterVolumeSpecName: "bundle") pod "7f8c3a2b-8cc3-4266-bdb2-e896769c8da4" (UID: "7f8c3a2b-8cc3-4266-bdb2-e896769c8da4"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:04:37 crc kubenswrapper[4651]: I1011 05:04:37.213937 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f8c3a2b-8cc3-4266-bdb2-e896769c8da4-kube-api-access-nvpkd" (OuterVolumeSpecName: "kube-api-access-nvpkd") pod "7f8c3a2b-8cc3-4266-bdb2-e896769c8da4" (UID: "7f8c3a2b-8cc3-4266-bdb2-e896769c8da4"). InnerVolumeSpecName "kube-api-access-nvpkd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:04:37 crc kubenswrapper[4651]: I1011 05:04:37.224332 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7f8c3a2b-8cc3-4266-bdb2-e896769c8da4-util" (OuterVolumeSpecName: "util") pod "7f8c3a2b-8cc3-4266-bdb2-e896769c8da4" (UID: "7f8c3a2b-8cc3-4266-bdb2-e896769c8da4"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:04:37 crc kubenswrapper[4651]: I1011 05:04:37.307423 4651 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7f8c3a2b-8cc3-4266-bdb2-e896769c8da4-util\") on node \"crc\" DevicePath \"\"" Oct 11 05:04:37 crc kubenswrapper[4651]: I1011 05:04:37.307468 4651 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7f8c3a2b-8cc3-4266-bdb2-e896769c8da4-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:04:37 crc kubenswrapper[4651]: I1011 05:04:37.307485 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nvpkd\" (UniqueName: \"kubernetes.io/projected/7f8c3a2b-8cc3-4266-bdb2-e896769c8da4-kube-api-access-nvpkd\") on node \"crc\" DevicePath \"\"" Oct 11 05:04:37 crc kubenswrapper[4651]: I1011 05:04:37.711023 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4" event={"ID":"7f8c3a2b-8cc3-4266-bdb2-e896769c8da4","Type":"ContainerDied","Data":"4602e5a4a5c55c3c2ef1bf75e6c62cacffe829390bb9bdef8b181b44eebef8a1"} Oct 11 05:04:37 crc kubenswrapper[4651]: I1011 05:04:37.711110 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4602e5a4a5c55c3c2ef1bf75e6c62cacffe829390bb9bdef8b181b44eebef8a1" Oct 11 05:04:37 crc kubenswrapper[4651]: I1011 05:04:37.711052 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4" Oct 11 05:04:38 crc kubenswrapper[4651]: I1011 05:04:38.948234 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pr2mb"] Oct 11 05:04:38 crc kubenswrapper[4651]: I1011 05:04:38.948630 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-pr2mb" podUID="b18db9ad-7e2e-41c7-815d-603953e750fb" containerName="registry-server" containerID="cri-o://22daaf07d85ba9e6bdd4c6d2485ec771a26b0dbc6ebac27ebfe5dd6c3a64ab9d" gracePeriod=2 Oct 11 05:04:39 crc kubenswrapper[4651]: I1011 05:04:39.529295 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pr2mb" Oct 11 05:04:39 crc kubenswrapper[4651]: I1011 05:04:39.640915 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b18db9ad-7e2e-41c7-815d-603953e750fb-catalog-content\") pod \"b18db9ad-7e2e-41c7-815d-603953e750fb\" (UID: \"b18db9ad-7e2e-41c7-815d-603953e750fb\") " Oct 11 05:04:39 crc kubenswrapper[4651]: I1011 05:04:39.641027 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lnl5c\" (UniqueName: \"kubernetes.io/projected/b18db9ad-7e2e-41c7-815d-603953e750fb-kube-api-access-lnl5c\") pod \"b18db9ad-7e2e-41c7-815d-603953e750fb\" (UID: \"b18db9ad-7e2e-41c7-815d-603953e750fb\") " Oct 11 05:04:39 crc kubenswrapper[4651]: I1011 05:04:39.641077 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b18db9ad-7e2e-41c7-815d-603953e750fb-utilities\") pod \"b18db9ad-7e2e-41c7-815d-603953e750fb\" (UID: \"b18db9ad-7e2e-41c7-815d-603953e750fb\") " Oct 11 05:04:39 crc kubenswrapper[4651]: I1011 05:04:39.642168 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b18db9ad-7e2e-41c7-815d-603953e750fb-utilities" (OuterVolumeSpecName: "utilities") pod "b18db9ad-7e2e-41c7-815d-603953e750fb" (UID: "b18db9ad-7e2e-41c7-815d-603953e750fb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:04:39 crc kubenswrapper[4651]: I1011 05:04:39.648028 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b18db9ad-7e2e-41c7-815d-603953e750fb-kube-api-access-lnl5c" (OuterVolumeSpecName: "kube-api-access-lnl5c") pod "b18db9ad-7e2e-41c7-815d-603953e750fb" (UID: "b18db9ad-7e2e-41c7-815d-603953e750fb"). InnerVolumeSpecName "kube-api-access-lnl5c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:04:39 crc kubenswrapper[4651]: I1011 05:04:39.657602 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b18db9ad-7e2e-41c7-815d-603953e750fb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b18db9ad-7e2e-41c7-815d-603953e750fb" (UID: "b18db9ad-7e2e-41c7-815d-603953e750fb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:04:39 crc kubenswrapper[4651]: I1011 05:04:39.730851 4651 generic.go:334] "Generic (PLEG): container finished" podID="b18db9ad-7e2e-41c7-815d-603953e750fb" containerID="22daaf07d85ba9e6bdd4c6d2485ec771a26b0dbc6ebac27ebfe5dd6c3a64ab9d" exitCode=0 Oct 11 05:04:39 crc kubenswrapper[4651]: I1011 05:04:39.730898 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pr2mb" event={"ID":"b18db9ad-7e2e-41c7-815d-603953e750fb","Type":"ContainerDied","Data":"22daaf07d85ba9e6bdd4c6d2485ec771a26b0dbc6ebac27ebfe5dd6c3a64ab9d"} Oct 11 05:04:39 crc kubenswrapper[4651]: I1011 05:04:39.730960 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pr2mb" Oct 11 05:04:39 crc kubenswrapper[4651]: I1011 05:04:39.731240 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pr2mb" event={"ID":"b18db9ad-7e2e-41c7-815d-603953e750fb","Type":"ContainerDied","Data":"624b05590dbea06aea0b7ae0ecd15d57a0308c4d5db6ba9e1b678367fd4c725f"} Oct 11 05:04:39 crc kubenswrapper[4651]: I1011 05:04:39.731269 4651 scope.go:117] "RemoveContainer" containerID="22daaf07d85ba9e6bdd4c6d2485ec771a26b0dbc6ebac27ebfe5dd6c3a64ab9d" Oct 11 05:04:39 crc kubenswrapper[4651]: I1011 05:04:39.742934 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lnl5c\" (UniqueName: \"kubernetes.io/projected/b18db9ad-7e2e-41c7-815d-603953e750fb-kube-api-access-lnl5c\") on node \"crc\" DevicePath \"\"" Oct 11 05:04:39 crc kubenswrapper[4651]: I1011 05:04:39.742964 4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b18db9ad-7e2e-41c7-815d-603953e750fb-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 05:04:39 crc kubenswrapper[4651]: I1011 05:04:39.742977 4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b18db9ad-7e2e-41c7-815d-603953e750fb-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 05:04:39 crc kubenswrapper[4651]: I1011 05:04:39.762669 4651 scope.go:117] "RemoveContainer" containerID="a95d5a8527a87c48687d5ad008f77e18133c3ae52126a970cada35ee8f3ea450" Oct 11 05:04:39 crc kubenswrapper[4651]: I1011 05:04:39.777448 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pr2mb"] Oct 11 05:04:39 crc kubenswrapper[4651]: I1011 05:04:39.782735 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-pr2mb"] Oct 11 05:04:39 crc kubenswrapper[4651]: I1011 05:04:39.806936 4651 scope.go:117] "RemoveContainer" containerID="358f18d0970fcb42b313816d5c3f8511ad32e13c94690a7fc926ec4f8e3b70bb" Oct 11 05:04:39 crc kubenswrapper[4651]: I1011 05:04:39.823511 4651 scope.go:117] "RemoveContainer" containerID="22daaf07d85ba9e6bdd4c6d2485ec771a26b0dbc6ebac27ebfe5dd6c3a64ab9d" Oct 11 05:04:39 crc kubenswrapper[4651]: E1011 05:04:39.824088 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22daaf07d85ba9e6bdd4c6d2485ec771a26b0dbc6ebac27ebfe5dd6c3a64ab9d\": container with ID starting with 22daaf07d85ba9e6bdd4c6d2485ec771a26b0dbc6ebac27ebfe5dd6c3a64ab9d not found: ID does not exist" containerID="22daaf07d85ba9e6bdd4c6d2485ec771a26b0dbc6ebac27ebfe5dd6c3a64ab9d" Oct 11 05:04:39 crc kubenswrapper[4651]: I1011 05:04:39.824141 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22daaf07d85ba9e6bdd4c6d2485ec771a26b0dbc6ebac27ebfe5dd6c3a64ab9d"} err="failed to get container status \"22daaf07d85ba9e6bdd4c6d2485ec771a26b0dbc6ebac27ebfe5dd6c3a64ab9d\": rpc error: code = NotFound desc = could not find container \"22daaf07d85ba9e6bdd4c6d2485ec771a26b0dbc6ebac27ebfe5dd6c3a64ab9d\": container with ID starting with 22daaf07d85ba9e6bdd4c6d2485ec771a26b0dbc6ebac27ebfe5dd6c3a64ab9d not found: ID does not exist" Oct 11 05:04:39 crc kubenswrapper[4651]: I1011 05:04:39.824162 4651 scope.go:117] "RemoveContainer" containerID="a95d5a8527a87c48687d5ad008f77e18133c3ae52126a970cada35ee8f3ea450" Oct 11 05:04:39 crc kubenswrapper[4651]: E1011 
05:04:39.824506 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a95d5a8527a87c48687d5ad008f77e18133c3ae52126a970cada35ee8f3ea450\": container with ID starting with a95d5a8527a87c48687d5ad008f77e18133c3ae52126a970cada35ee8f3ea450 not found: ID does not exist" containerID="a95d5a8527a87c48687d5ad008f77e18133c3ae52126a970cada35ee8f3ea450" Oct 11 05:04:39 crc kubenswrapper[4651]: I1011 05:04:39.824529 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a95d5a8527a87c48687d5ad008f77e18133c3ae52126a970cada35ee8f3ea450"} err="failed to get container status \"a95d5a8527a87c48687d5ad008f77e18133c3ae52126a970cada35ee8f3ea450\": rpc error: code = NotFound desc = could not find container \"a95d5a8527a87c48687d5ad008f77e18133c3ae52126a970cada35ee8f3ea450\": container with ID starting with a95d5a8527a87c48687d5ad008f77e18133c3ae52126a970cada35ee8f3ea450 not found: ID does not exist" Oct 11 05:04:39 crc kubenswrapper[4651]: I1011 05:04:39.824543 4651 scope.go:117] "RemoveContainer" containerID="358f18d0970fcb42b313816d5c3f8511ad32e13c94690a7fc926ec4f8e3b70bb" Oct 11 05:04:39 crc kubenswrapper[4651]: E1011 05:04:39.824869 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"358f18d0970fcb42b313816d5c3f8511ad32e13c94690a7fc926ec4f8e3b70bb\": container with ID starting with 358f18d0970fcb42b313816d5c3f8511ad32e13c94690a7fc926ec4f8e3b70bb not found: ID does not exist" containerID="358f18d0970fcb42b313816d5c3f8511ad32e13c94690a7fc926ec4f8e3b70bb" Oct 11 05:04:39 crc kubenswrapper[4651]: I1011 05:04:39.824903 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"358f18d0970fcb42b313816d5c3f8511ad32e13c94690a7fc926ec4f8e3b70bb"} err="failed to get container status \"358f18d0970fcb42b313816d5c3f8511ad32e13c94690a7fc926ec4f8e3b70bb\": rpc error: code = NotFound desc = could not find container \"358f18d0970fcb42b313816d5c3f8511ad32e13c94690a7fc926ec4f8e3b70bb\": container with ID starting with 358f18d0970fcb42b313816d5c3f8511ad32e13c94690a7fc926ec4f8e3b70bb not found: ID does not exist" Oct 11 05:04:39 crc kubenswrapper[4651]: I1011 05:04:39.882570 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b18db9ad-7e2e-41c7-815d-603953e750fb" path="/var/lib/kubelet/pods/b18db9ad-7e2e-41c7-815d-603953e750fb/volumes" Oct 11 05:04:41 crc kubenswrapper[4651]: I1011 05:04:41.731619 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-69c9cf8694-vccpk"] Oct 11 05:04:41 crc kubenswrapper[4651]: E1011 05:04:41.732123 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f8c3a2b-8cc3-4266-bdb2-e896769c8da4" containerName="pull" Oct 11 05:04:41 crc kubenswrapper[4651]: I1011 05:04:41.732135 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f8c3a2b-8cc3-4266-bdb2-e896769c8da4" containerName="pull" Oct 11 05:04:41 crc kubenswrapper[4651]: E1011 05:04:41.732147 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b18db9ad-7e2e-41c7-815d-603953e750fb" containerName="registry-server" Oct 11 05:04:41 crc kubenswrapper[4651]: I1011 05:04:41.732153 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="b18db9ad-7e2e-41c7-815d-603953e750fb" containerName="registry-server" Oct 11 05:04:41 crc kubenswrapper[4651]: E1011 05:04:41.732161 4651 
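[Annotation] The three RemoveContainer / "ContainerStatus from runtime service failed" / "DeleteContainer returned error" triples above are benign: the containers were already removed, so the runtime answers gRPC NotFound and the kubelet treats deletion as idempotent rather than failing the cleanup. A small Go sketch of that pattern; the helper names are invented for illustration.

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// containerStatus stands in for the CRI ContainerStatus call; after the pod
// above is gone, the runtime answers NotFound, exactly as in the log.
func containerStatus(id string) error {
	return status.Errorf(codes.NotFound,
		"could not find container %q: ID does not exist", id)
}

// removeIfPresent mirrors the logged behavior: a NotFound status is logged
// and swallowed, because deleting an already-deleted container is success.
func removeIfPresent(id string) error {
	if err := containerStatus(id); err != nil {
		if status.Code(err) == codes.NotFound {
			fmt.Printf("DeleteContainer returned error (ignored): %v\n", err)
			return nil
		}
		return err
	}
	// ... would call RemoveContainer here ...
	return nil
}

func main() {
	_ = removeIfPresent("22daaf07d85b") // truncated ID for illustration
}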
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b18db9ad-7e2e-41c7-815d-603953e750fb" containerName="extract-utilities" Oct 11 05:04:41 crc kubenswrapper[4651]: I1011 05:04:41.732168 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="b18db9ad-7e2e-41c7-815d-603953e750fb" containerName="extract-utilities" Oct 11 05:04:41 crc kubenswrapper[4651]: E1011 05:04:41.732177 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b18db9ad-7e2e-41c7-815d-603953e750fb" containerName="extract-content" Oct 11 05:04:41 crc kubenswrapper[4651]: I1011 05:04:41.732182 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="b18db9ad-7e2e-41c7-815d-603953e750fb" containerName="extract-content" Oct 11 05:04:41 crc kubenswrapper[4651]: E1011 05:04:41.732194 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f8c3a2b-8cc3-4266-bdb2-e896769c8da4" containerName="extract" Oct 11 05:04:41 crc kubenswrapper[4651]: I1011 05:04:41.732199 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f8c3a2b-8cc3-4266-bdb2-e896769c8da4" containerName="extract" Oct 11 05:04:41 crc kubenswrapper[4651]: E1011 05:04:41.732206 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f8c3a2b-8cc3-4266-bdb2-e896769c8da4" containerName="util" Oct 11 05:04:41 crc kubenswrapper[4651]: I1011 05:04:41.732211 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f8c3a2b-8cc3-4266-bdb2-e896769c8da4" containerName="util" Oct 11 05:04:41 crc kubenswrapper[4651]: I1011 05:04:41.732310 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f8c3a2b-8cc3-4266-bdb2-e896769c8da4" containerName="extract" Oct 11 05:04:41 crc kubenswrapper[4651]: I1011 05:04:41.732328 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="b18db9ad-7e2e-41c7-815d-603953e750fb" containerName="registry-server" Oct 11 05:04:41 crc kubenswrapper[4651]: I1011 05:04:41.732930 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-69c9cf8694-vccpk" Oct 11 05:04:41 crc kubenswrapper[4651]: W1011 05:04:41.740275 4651 reflector.go:561] object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-prs9b": failed to list *v1.Secret: secrets "openstack-operator-controller-operator-dockercfg-prs9b" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openstack-operators": no relationship found between node 'crc' and this object Oct 11 05:04:41 crc kubenswrapper[4651]: E1011 05:04:41.740617 4651 reflector.go:158] "Unhandled Error" err="object-\"openstack-operators\"/\"openstack-operator-controller-operator-dockercfg-prs9b\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"openstack-operator-controller-operator-dockercfg-prs9b\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openstack-operators\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 11 05:04:41 crc kubenswrapper[4651]: I1011 05:04:41.765722 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-599wx\" (UniqueName: \"kubernetes.io/projected/25b74915-17cd-4558-9801-5a0d5113b578-kube-api-access-599wx\") pod \"openstack-operator-controller-operator-69c9cf8694-vccpk\" (UID: \"25b74915-17cd-4558-9801-5a0d5113b578\") " pod="openstack-operators/openstack-operator-controller-operator-69c9cf8694-vccpk" Oct 11 05:04:41 crc kubenswrapper[4651]: I1011 05:04:41.771241 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-69c9cf8694-vccpk"] Oct 11 05:04:41 crc kubenswrapper[4651]: I1011 05:04:41.867469 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-599wx\" (UniqueName: \"kubernetes.io/projected/25b74915-17cd-4558-9801-5a0d5113b578-kube-api-access-599wx\") pod \"openstack-operator-controller-operator-69c9cf8694-vccpk\" (UID: \"25b74915-17cd-4558-9801-5a0d5113b578\") " pod="openstack-operators/openstack-operator-controller-operator-69c9cf8694-vccpk" Oct 11 05:04:41 crc kubenswrapper[4651]: I1011 05:04:41.885055 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-599wx\" (UniqueName: \"kubernetes.io/projected/25b74915-17cd-4558-9801-5a0d5113b578-kube-api-access-599wx\") pod \"openstack-operator-controller-operator-69c9cf8694-vccpk\" (UID: \"25b74915-17cd-4558-9801-5a0d5113b578\") " pod="openstack-operators/openstack-operator-controller-operator-69c9cf8694-vccpk" Oct 11 05:04:43 crc kubenswrapper[4651]: I1011 05:04:43.022079 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-prs9b" Oct 11 05:04:43 crc kubenswrapper[4651]: I1011 05:04:43.029220 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-69c9cf8694-vccpk" Oct 11 05:04:43 crc kubenswrapper[4651]: I1011 05:04:43.522503 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-69c9cf8694-vccpk"] Oct 11 05:04:43 crc kubenswrapper[4651]: W1011 05:04:43.530567 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod25b74915_17cd_4558_9801_5a0d5113b578.slice/crio-ee8312ae155f3fab228ac07e6ad1e41fe3f70038806eab92c9027558193bc97a WatchSource:0}: Error finding container ee8312ae155f3fab228ac07e6ad1e41fe3f70038806eab92c9027558193bc97a: Status 404 returned error can't find the container with id ee8312ae155f3fab228ac07e6ad1e41fe3f70038806eab92c9027558193bc97a Oct 11 05:04:43 crc kubenswrapper[4651]: I1011 05:04:43.755916 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-69c9cf8694-vccpk" event={"ID":"25b74915-17cd-4558-9801-5a0d5113b578","Type":"ContainerStarted","Data":"ee8312ae155f3fab228ac07e6ad1e41fe3f70038806eab92c9027558193bc97a"} Oct 11 05:04:44 crc kubenswrapper[4651]: I1011 05:04:44.953870 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-dgghb"] Oct 11 05:04:44 crc kubenswrapper[4651]: I1011 05:04:44.957108 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dgghb" Oct 11 05:04:44 crc kubenswrapper[4651]: I1011 05:04:44.963609 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dgghb"] Oct 11 05:04:45 crc kubenswrapper[4651]: I1011 05:04:45.006234 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ef9c989-bc3c-400b-aacc-3f7b8875e2a0-catalog-content\") pod \"certified-operators-dgghb\" (UID: \"5ef9c989-bc3c-400b-aacc-3f7b8875e2a0\") " pod="openshift-marketplace/certified-operators-dgghb" Oct 11 05:04:45 crc kubenswrapper[4651]: I1011 05:04:45.006311 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ef9c989-bc3c-400b-aacc-3f7b8875e2a0-utilities\") pod \"certified-operators-dgghb\" (UID: \"5ef9c989-bc3c-400b-aacc-3f7b8875e2a0\") " pod="openshift-marketplace/certified-operators-dgghb" Oct 11 05:04:45 crc kubenswrapper[4651]: I1011 05:04:45.006328 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2pft\" (UniqueName: \"kubernetes.io/projected/5ef9c989-bc3c-400b-aacc-3f7b8875e2a0-kube-api-access-w2pft\") pod \"certified-operators-dgghb\" (UID: \"5ef9c989-bc3c-400b-aacc-3f7b8875e2a0\") " pod="openshift-marketplace/certified-operators-dgghb" Oct 11 05:04:45 crc kubenswrapper[4651]: I1011 05:04:45.107795 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ef9c989-bc3c-400b-aacc-3f7b8875e2a0-catalog-content\") pod \"certified-operators-dgghb\" (UID: \"5ef9c989-bc3c-400b-aacc-3f7b8875e2a0\") " pod="openshift-marketplace/certified-operators-dgghb" Oct 11 05:04:45 crc kubenswrapper[4651]: I1011 05:04:45.107901 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/5ef9c989-bc3c-400b-aacc-3f7b8875e2a0-utilities\") pod \"certified-operators-dgghb\" (UID: \"5ef9c989-bc3c-400b-aacc-3f7b8875e2a0\") " pod="openshift-marketplace/certified-operators-dgghb" Oct 11 05:04:45 crc kubenswrapper[4651]: I1011 05:04:45.107929 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w2pft\" (UniqueName: \"kubernetes.io/projected/5ef9c989-bc3c-400b-aacc-3f7b8875e2a0-kube-api-access-w2pft\") pod \"certified-operators-dgghb\" (UID: \"5ef9c989-bc3c-400b-aacc-3f7b8875e2a0\") " pod="openshift-marketplace/certified-operators-dgghb" Oct 11 05:04:45 crc kubenswrapper[4651]: I1011 05:04:45.108490 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ef9c989-bc3c-400b-aacc-3f7b8875e2a0-utilities\") pod \"certified-operators-dgghb\" (UID: \"5ef9c989-bc3c-400b-aacc-3f7b8875e2a0\") " pod="openshift-marketplace/certified-operators-dgghb" Oct 11 05:04:45 crc kubenswrapper[4651]: I1011 05:04:45.108550 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ef9c989-bc3c-400b-aacc-3f7b8875e2a0-catalog-content\") pod \"certified-operators-dgghb\" (UID: \"5ef9c989-bc3c-400b-aacc-3f7b8875e2a0\") " pod="openshift-marketplace/certified-operators-dgghb" Oct 11 05:04:45 crc kubenswrapper[4651]: I1011 05:04:45.135597 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w2pft\" (UniqueName: \"kubernetes.io/projected/5ef9c989-bc3c-400b-aacc-3f7b8875e2a0-kube-api-access-w2pft\") pod \"certified-operators-dgghb\" (UID: \"5ef9c989-bc3c-400b-aacc-3f7b8875e2a0\") " pod="openshift-marketplace/certified-operators-dgghb" Oct 11 05:04:45 crc kubenswrapper[4651]: I1011 05:04:45.284391 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dgghb" Oct 11 05:04:47 crc kubenswrapper[4651]: I1011 05:04:47.465387 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dgghb"] Oct 11 05:04:47 crc kubenswrapper[4651]: I1011 05:04:47.786206 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-69c9cf8694-vccpk" event={"ID":"25b74915-17cd-4558-9801-5a0d5113b578","Type":"ContainerStarted","Data":"20d55d6908747648bfd5fe1ddcd9557ca3640f9567bef2cba35ad1b6bb85020b"} Oct 11 05:04:47 crc kubenswrapper[4651]: I1011 05:04:47.787731 4651 generic.go:334] "Generic (PLEG): container finished" podID="5ef9c989-bc3c-400b-aacc-3f7b8875e2a0" containerID="be7b4b4d5f2e4b6a2242ceda7a132de5ce65e67b32b36c8a7f2e9593ba195198" exitCode=0 Oct 11 05:04:47 crc kubenswrapper[4651]: I1011 05:04:47.787784 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dgghb" event={"ID":"5ef9c989-bc3c-400b-aacc-3f7b8875e2a0","Type":"ContainerDied","Data":"be7b4b4d5f2e4b6a2242ceda7a132de5ce65e67b32b36c8a7f2e9593ba195198"} Oct 11 05:04:47 crc kubenswrapper[4651]: I1011 05:04:47.787850 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dgghb" event={"ID":"5ef9c989-bc3c-400b-aacc-3f7b8875e2a0","Type":"ContainerStarted","Data":"e98f2948da863bf58a80cc93c0c48625ba8b4b1384cdcb925f4e36bad35249ef"} Oct 11 05:04:49 crc kubenswrapper[4651]: I1011 05:04:49.801120 4651 generic.go:334] "Generic (PLEG): container finished" podID="5ef9c989-bc3c-400b-aacc-3f7b8875e2a0" containerID="00e7edd585158e42c76b9fbb060cfe161bc12ec92ebc88c2d9c84c0c68a587bf" exitCode=0 Oct 11 05:04:49 crc kubenswrapper[4651]: I1011 05:04:49.801195 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dgghb" event={"ID":"5ef9c989-bc3c-400b-aacc-3f7b8875e2a0","Type":"ContainerDied","Data":"00e7edd585158e42c76b9fbb060cfe161bc12ec92ebc88c2d9c84c0c68a587bf"} Oct 11 05:04:49 crc kubenswrapper[4651]: I1011 05:04:49.803251 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-69c9cf8694-vccpk" event={"ID":"25b74915-17cd-4558-9801-5a0d5113b578","Type":"ContainerStarted","Data":"5f1ca5f79310553d7766b4004be9bd7f9027eb1997f9cacf64fcfe7cc6685855"} Oct 11 05:04:49 crc kubenswrapper[4651]: I1011 05:04:49.803417 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-69c9cf8694-vccpk" Oct 11 05:04:49 crc kubenswrapper[4651]: I1011 05:04:49.868565 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-69c9cf8694-vccpk" podStartSLOduration=3.02971083 podStartE2EDuration="8.868542792s" podCreationTimestamp="2025-10-11 05:04:41 +0000 UTC" firstStartedPulling="2025-10-11 05:04:43.533236218 +0000 UTC m=+804.429469024" lastFinishedPulling="2025-10-11 05:04:49.37206815 +0000 UTC m=+810.268300986" observedRunningTime="2025-10-11 05:04:49.863710509 +0000 UTC m=+810.759943335" watchObservedRunningTime="2025-10-11 05:04:49.868542792 +0000 UTC m=+810.764775628" Oct 11 05:04:50 crc kubenswrapper[4651]: I1011 05:04:50.813492 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dgghb" 
event={"ID":"5ef9c989-bc3c-400b-aacc-3f7b8875e2a0","Type":"ContainerStarted","Data":"07c41b088ab376f28bfd3ee03a65284b147c310ff5aebbab1b4c8e5535f30b9c"} Oct 11 05:04:50 crc kubenswrapper[4651]: I1011 05:04:50.839699 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-dgghb" podStartSLOduration=4.224527332 podStartE2EDuration="6.83966932s" podCreationTimestamp="2025-10-11 05:04:44 +0000 UTC" firstStartedPulling="2025-10-11 05:04:47.789181642 +0000 UTC m=+808.685414438" lastFinishedPulling="2025-10-11 05:04:50.40432363 +0000 UTC m=+811.300556426" observedRunningTime="2025-10-11 05:04:50.838112601 +0000 UTC m=+811.734345447" watchObservedRunningTime="2025-10-11 05:04:50.83966932 +0000 UTC m=+811.735902166" Oct 11 05:04:53 crc kubenswrapper[4651]: I1011 05:04:53.035722 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-69c9cf8694-vccpk" Oct 11 05:04:55 crc kubenswrapper[4651]: I1011 05:04:55.285074 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-dgghb" Oct 11 05:04:55 crc kubenswrapper[4651]: I1011 05:04:55.285257 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-dgghb" Oct 11 05:04:55 crc kubenswrapper[4651]: I1011 05:04:55.341977 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-dgghb" Oct 11 05:04:55 crc kubenswrapper[4651]: I1011 05:04:55.914452 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-dgghb" Oct 11 05:04:56 crc kubenswrapper[4651]: I1011 05:04:56.181508 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dgghb"] Oct 11 05:04:57 crc kubenswrapper[4651]: I1011 05:04:57.869551 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-dgghb" podUID="5ef9c989-bc3c-400b-aacc-3f7b8875e2a0" containerName="registry-server" containerID="cri-o://07c41b088ab376f28bfd3ee03a65284b147c310ff5aebbab1b4c8e5535f30b9c" gracePeriod=2 Oct 11 05:04:58 crc kubenswrapper[4651]: I1011 05:04:58.286028 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dgghb" Oct 11 05:04:58 crc kubenswrapper[4651]: I1011 05:04:58.411543 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ef9c989-bc3c-400b-aacc-3f7b8875e2a0-catalog-content\") pod \"5ef9c989-bc3c-400b-aacc-3f7b8875e2a0\" (UID: \"5ef9c989-bc3c-400b-aacc-3f7b8875e2a0\") " Oct 11 05:04:58 crc kubenswrapper[4651]: I1011 05:04:58.411628 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w2pft\" (UniqueName: \"kubernetes.io/projected/5ef9c989-bc3c-400b-aacc-3f7b8875e2a0-kube-api-access-w2pft\") pod \"5ef9c989-bc3c-400b-aacc-3f7b8875e2a0\" (UID: \"5ef9c989-bc3c-400b-aacc-3f7b8875e2a0\") " Oct 11 05:04:58 crc kubenswrapper[4651]: I1011 05:04:58.411670 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ef9c989-bc3c-400b-aacc-3f7b8875e2a0-utilities\") pod \"5ef9c989-bc3c-400b-aacc-3f7b8875e2a0\" (UID: \"5ef9c989-bc3c-400b-aacc-3f7b8875e2a0\") " Oct 11 05:04:58 crc kubenswrapper[4651]: I1011 05:04:58.412555 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ef9c989-bc3c-400b-aacc-3f7b8875e2a0-utilities" (OuterVolumeSpecName: "utilities") pod "5ef9c989-bc3c-400b-aacc-3f7b8875e2a0" (UID: "5ef9c989-bc3c-400b-aacc-3f7b8875e2a0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:04:58 crc kubenswrapper[4651]: I1011 05:04:58.418208 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ef9c989-bc3c-400b-aacc-3f7b8875e2a0-kube-api-access-w2pft" (OuterVolumeSpecName: "kube-api-access-w2pft") pod "5ef9c989-bc3c-400b-aacc-3f7b8875e2a0" (UID: "5ef9c989-bc3c-400b-aacc-3f7b8875e2a0"). InnerVolumeSpecName "kube-api-access-w2pft". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:04:58 crc kubenswrapper[4651]: I1011 05:04:58.492064 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ef9c989-bc3c-400b-aacc-3f7b8875e2a0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5ef9c989-bc3c-400b-aacc-3f7b8875e2a0" (UID: "5ef9c989-bc3c-400b-aacc-3f7b8875e2a0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:04:58 crc kubenswrapper[4651]: I1011 05:04:58.513557 4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5ef9c989-bc3c-400b-aacc-3f7b8875e2a0-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 05:04:58 crc kubenswrapper[4651]: I1011 05:04:58.513592 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w2pft\" (UniqueName: \"kubernetes.io/projected/5ef9c989-bc3c-400b-aacc-3f7b8875e2a0-kube-api-access-w2pft\") on node \"crc\" DevicePath \"\"" Oct 11 05:04:58 crc kubenswrapper[4651]: I1011 05:04:58.513604 4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5ef9c989-bc3c-400b-aacc-3f7b8875e2a0-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 05:04:58 crc kubenswrapper[4651]: I1011 05:04:58.876142 4651 generic.go:334] "Generic (PLEG): container finished" podID="5ef9c989-bc3c-400b-aacc-3f7b8875e2a0" containerID="07c41b088ab376f28bfd3ee03a65284b147c310ff5aebbab1b4c8e5535f30b9c" exitCode=0 Oct 11 05:04:58 crc kubenswrapper[4651]: I1011 05:04:58.876181 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dgghb" event={"ID":"5ef9c989-bc3c-400b-aacc-3f7b8875e2a0","Type":"ContainerDied","Data":"07c41b088ab376f28bfd3ee03a65284b147c310ff5aebbab1b4c8e5535f30b9c"} Oct 11 05:04:58 crc kubenswrapper[4651]: I1011 05:04:58.876206 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dgghb" event={"ID":"5ef9c989-bc3c-400b-aacc-3f7b8875e2a0","Type":"ContainerDied","Data":"e98f2948da863bf58a80cc93c0c48625ba8b4b1384cdcb925f4e36bad35249ef"} Oct 11 05:04:58 crc kubenswrapper[4651]: I1011 05:04:58.876208 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dgghb" Oct 11 05:04:58 crc kubenswrapper[4651]: I1011 05:04:58.876224 4651 scope.go:117] "RemoveContainer" containerID="07c41b088ab376f28bfd3ee03a65284b147c310ff5aebbab1b4c8e5535f30b9c" Oct 11 05:04:58 crc kubenswrapper[4651]: I1011 05:04:58.894303 4651 scope.go:117] "RemoveContainer" containerID="00e7edd585158e42c76b9fbb060cfe161bc12ec92ebc88c2d9c84c0c68a587bf" Oct 11 05:04:58 crc kubenswrapper[4651]: I1011 05:04:58.914737 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dgghb"] Oct 11 05:04:58 crc kubenswrapper[4651]: I1011 05:04:58.924662 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-dgghb"] Oct 11 05:04:58 crc kubenswrapper[4651]: I1011 05:04:58.931767 4651 scope.go:117] "RemoveContainer" containerID="be7b4b4d5f2e4b6a2242ceda7a132de5ce65e67b32b36c8a7f2e9593ba195198" Oct 11 05:04:58 crc kubenswrapper[4651]: I1011 05:04:58.955795 4651 scope.go:117] "RemoveContainer" containerID="07c41b088ab376f28bfd3ee03a65284b147c310ff5aebbab1b4c8e5535f30b9c" Oct 11 05:04:58 crc kubenswrapper[4651]: E1011 05:04:58.959210 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07c41b088ab376f28bfd3ee03a65284b147c310ff5aebbab1b4c8e5535f30b9c\": container with ID starting with 07c41b088ab376f28bfd3ee03a65284b147c310ff5aebbab1b4c8e5535f30b9c not found: ID does not exist" containerID="07c41b088ab376f28bfd3ee03a65284b147c310ff5aebbab1b4c8e5535f30b9c" Oct 11 05:04:58 crc kubenswrapper[4651]: I1011 05:04:58.959252 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07c41b088ab376f28bfd3ee03a65284b147c310ff5aebbab1b4c8e5535f30b9c"} err="failed to get container status \"07c41b088ab376f28bfd3ee03a65284b147c310ff5aebbab1b4c8e5535f30b9c\": rpc error: code = NotFound desc = could not find container \"07c41b088ab376f28bfd3ee03a65284b147c310ff5aebbab1b4c8e5535f30b9c\": container with ID starting with 07c41b088ab376f28bfd3ee03a65284b147c310ff5aebbab1b4c8e5535f30b9c not found: ID does not exist" Oct 11 05:04:58 crc kubenswrapper[4651]: I1011 05:04:58.959280 4651 scope.go:117] "RemoveContainer" containerID="00e7edd585158e42c76b9fbb060cfe161bc12ec92ebc88c2d9c84c0c68a587bf" Oct 11 05:04:58 crc kubenswrapper[4651]: E1011 05:04:58.963473 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00e7edd585158e42c76b9fbb060cfe161bc12ec92ebc88c2d9c84c0c68a587bf\": container with ID starting with 00e7edd585158e42c76b9fbb060cfe161bc12ec92ebc88c2d9c84c0c68a587bf not found: ID does not exist" containerID="00e7edd585158e42c76b9fbb060cfe161bc12ec92ebc88c2d9c84c0c68a587bf" Oct 11 05:04:58 crc kubenswrapper[4651]: I1011 05:04:58.963513 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00e7edd585158e42c76b9fbb060cfe161bc12ec92ebc88c2d9c84c0c68a587bf"} err="failed to get container status \"00e7edd585158e42c76b9fbb060cfe161bc12ec92ebc88c2d9c84c0c68a587bf\": rpc error: code = NotFound desc = could not find container \"00e7edd585158e42c76b9fbb060cfe161bc12ec92ebc88c2d9c84c0c68a587bf\": container with ID starting with 00e7edd585158e42c76b9fbb060cfe161bc12ec92ebc88c2d9c84c0c68a587bf not found: ID does not exist" Oct 11 05:04:58 crc kubenswrapper[4651]: I1011 05:04:58.963537 4651 scope.go:117] "RemoveContainer" 
containerID="be7b4b4d5f2e4b6a2242ceda7a132de5ce65e67b32b36c8a7f2e9593ba195198" Oct 11 05:04:58 crc kubenswrapper[4651]: E1011 05:04:58.968200 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be7b4b4d5f2e4b6a2242ceda7a132de5ce65e67b32b36c8a7f2e9593ba195198\": container with ID starting with be7b4b4d5f2e4b6a2242ceda7a132de5ce65e67b32b36c8a7f2e9593ba195198 not found: ID does not exist" containerID="be7b4b4d5f2e4b6a2242ceda7a132de5ce65e67b32b36c8a7f2e9593ba195198" Oct 11 05:04:58 crc kubenswrapper[4651]: I1011 05:04:58.968240 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be7b4b4d5f2e4b6a2242ceda7a132de5ce65e67b32b36c8a7f2e9593ba195198"} err="failed to get container status \"be7b4b4d5f2e4b6a2242ceda7a132de5ce65e67b32b36c8a7f2e9593ba195198\": rpc error: code = NotFound desc = could not find container \"be7b4b4d5f2e4b6a2242ceda7a132de5ce65e67b32b36c8a7f2e9593ba195198\": container with ID starting with be7b4b4d5f2e4b6a2242ceda7a132de5ce65e67b32b36c8a7f2e9593ba195198 not found: ID does not exist" Oct 11 05:04:59 crc kubenswrapper[4651]: I1011 05:04:59.876083 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ef9c989-bc3c-400b-aacc-3f7b8875e2a0" path="/var/lib/kubelet/pods/5ef9c989-bc3c-400b-aacc-3f7b8875e2a0/volumes" Oct 11 05:05:09 crc kubenswrapper[4651]: I1011 05:05:09.948782 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-64f84fcdbb-gbn6q"] Oct 11 05:05:09 crc kubenswrapper[4651]: E1011 05:05:09.949533 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ef9c989-bc3c-400b-aacc-3f7b8875e2a0" containerName="extract-utilities" Oct 11 05:05:09 crc kubenswrapper[4651]: I1011 05:05:09.949545 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ef9c989-bc3c-400b-aacc-3f7b8875e2a0" containerName="extract-utilities" Oct 11 05:05:09 crc kubenswrapper[4651]: E1011 05:05:09.949555 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ef9c989-bc3c-400b-aacc-3f7b8875e2a0" containerName="registry-server" Oct 11 05:05:09 crc kubenswrapper[4651]: I1011 05:05:09.949561 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ef9c989-bc3c-400b-aacc-3f7b8875e2a0" containerName="registry-server" Oct 11 05:05:09 crc kubenswrapper[4651]: E1011 05:05:09.949574 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ef9c989-bc3c-400b-aacc-3f7b8875e2a0" containerName="extract-content" Oct 11 05:05:09 crc kubenswrapper[4651]: I1011 05:05:09.949580 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ef9c989-bc3c-400b-aacc-3f7b8875e2a0" containerName="extract-content" Oct 11 05:05:09 crc kubenswrapper[4651]: I1011 05:05:09.949673 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ef9c989-bc3c-400b-aacc-3f7b8875e2a0" containerName="registry-server" Oct 11 05:05:09 crc kubenswrapper[4651]: I1011 05:05:09.950275 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-gbn6q" Oct 11 05:05:09 crc kubenswrapper[4651]: I1011 05:05:09.952082 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-hwbr6" Oct 11 05:05:09 crc kubenswrapper[4651]: I1011 05:05:09.967430 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-59cdc64769-xtlln"] Oct 11 05:05:09 crc kubenswrapper[4651]: I1011 05:05:09.968379 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-xtlln" Oct 11 05:05:09 crc kubenswrapper[4651]: I1011 05:05:09.970314 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-fv8dz" Oct 11 05:05:09 crc kubenswrapper[4651]: I1011 05:05:09.989223 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-59cdc64769-xtlln"] Oct 11 05:05:09 crc kubenswrapper[4651]: I1011 05:05:09.994985 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-687df44cdb-g8m5f"] Oct 11 05:05:09 crc kubenswrapper[4651]: I1011 05:05:09.995924 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-g8m5f" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.001550 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-7bb46cd7d-t88s8"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.001995 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-lvfmx" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.002703 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-t88s8" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.007526 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-dzb9j" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.019646 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-7bb46cd7d-t88s8"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.028065 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-64f84fcdbb-gbn6q"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.033704 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-687df44cdb-g8m5f"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.057898 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-6d9967f8dd-prncj"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.059074 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-prncj" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.061609 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-4fknt" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.062375 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gfcvz\" (UniqueName: \"kubernetes.io/projected/d725cd21-efdc-4182-be84-460db3042d11-kube-api-access-gfcvz\") pod \"heat-operator-controller-manager-6d9967f8dd-prncj\" (UID: \"d725cd21-efdc-4182-be84-460db3042d11\") " pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-prncj" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.062431 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7f95k\" (UniqueName: \"kubernetes.io/projected/cca7099f-c5ef-4109-91f5-b6831d0771e8-kube-api-access-7f95k\") pod \"barbican-operator-controller-manager-64f84fcdbb-gbn6q\" (UID: \"cca7099f-c5ef-4109-91f5-b6831d0771e8\") " pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-gbn6q" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.062459 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d94vj\" (UniqueName: \"kubernetes.io/projected/75692ce0-1ecb-4db6-a831-5740382b17e2-kube-api-access-d94vj\") pod \"designate-operator-controller-manager-687df44cdb-g8m5f\" (UID: \"75692ce0-1ecb-4db6-a831-5740382b17e2\") " pod="openstack-operators/designate-operator-controller-manager-687df44cdb-g8m5f" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.062508 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q75vp\" (UniqueName: \"kubernetes.io/projected/b5b061bd-7d85-4960-a956-95c7911591a2-kube-api-access-q75vp\") pod \"cinder-operator-controller-manager-59cdc64769-xtlln\" (UID: \"b5b061bd-7d85-4960-a956-95c7911591a2\") " pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-xtlln" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.062522 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cm5cr\" (UniqueName: \"kubernetes.io/projected/99b28924-6f7a-4232-8fd5-b245178ce2ea-kube-api-access-cm5cr\") pod \"glance-operator-controller-manager-7bb46cd7d-t88s8\" (UID: \"99b28924-6f7a-4232-8fd5-b245178ce2ea\") " pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-t88s8" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.064457 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6d74794d9b-c9bzn"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.065566 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-c9bzn" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.068431 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-2j8d2" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.079134 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-8678f847b6-vpnkk"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.080187 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-8678f847b6-vpnkk" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.087137 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-cfwj8" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.087690 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.095323 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-6d9967f8dd-prncj"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.119907 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-8678f847b6-vpnkk"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.126192 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6d74794d9b-c9bzn"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.138864 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-74cb5cbc49-fwr7j"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.139872 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-fwr7j" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.146218 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-wz58q" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.161799 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-74cb5cbc49-fwr7j"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.169518 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-ddb98f99b-989mj"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.170358 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gfcvz\" (UniqueName: \"kubernetes.io/projected/d725cd21-efdc-4182-be84-460db3042d11-kube-api-access-gfcvz\") pod \"heat-operator-controller-manager-6d9967f8dd-prncj\" (UID: \"d725cd21-efdc-4182-be84-460db3042d11\") " pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-prncj" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.170408 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/50a25e99-d2ec-4b16-a5fa-894e79ee528e-cert\") pod \"infra-operator-controller-manager-8678f847b6-vpnkk\" (UID: \"50a25e99-d2ec-4b16-a5fa-894e79ee528e\") " pod="openstack-operators/infra-operator-controller-manager-8678f847b6-vpnkk" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.170440 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58q2b\" (UniqueName: \"kubernetes.io/projected/205c5753-c94e-4bf4-993f-36b798bb489d-kube-api-access-58q2b\") pod \"ironic-operator-controller-manager-74cb5cbc49-fwr7j\" (UID: \"205c5753-c94e-4bf4-993f-36b798bb489d\") " pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-fwr7j" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.170528 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7f95k\" (UniqueName: \"kubernetes.io/projected/cca7099f-c5ef-4109-91f5-b6831d0771e8-kube-api-access-7f95k\") pod \"barbican-operator-controller-manager-64f84fcdbb-gbn6q\" (UID: \"cca7099f-c5ef-4109-91f5-b6831d0771e8\") " pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-gbn6q" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.170555 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-989mj" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.170565 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d94vj\" (UniqueName: \"kubernetes.io/projected/75692ce0-1ecb-4db6-a831-5740382b17e2-kube-api-access-d94vj\") pod \"designate-operator-controller-manager-687df44cdb-g8m5f\" (UID: \"75692ce0-1ecb-4db6-a831-5740382b17e2\") " pod="openstack-operators/designate-operator-controller-manager-687df44cdb-g8m5f" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.170631 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q75vp\" (UniqueName: \"kubernetes.io/projected/b5b061bd-7d85-4960-a956-95c7911591a2-kube-api-access-q75vp\") pod \"cinder-operator-controller-manager-59cdc64769-xtlln\" (UID: \"b5b061bd-7d85-4960-a956-95c7911591a2\") " pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-xtlln" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.170648 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cm5cr\" (UniqueName: \"kubernetes.io/projected/99b28924-6f7a-4232-8fd5-b245178ce2ea-kube-api-access-cm5cr\") pod \"glance-operator-controller-manager-7bb46cd7d-t88s8\" (UID: \"99b28924-6f7a-4232-8fd5-b245178ce2ea\") " pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-t88s8" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.170673 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2brsd\" (UniqueName: \"kubernetes.io/projected/dd5122d9-c098-49cf-9723-bc0c31c6ce3b-kube-api-access-2brsd\") pod \"horizon-operator-controller-manager-6d74794d9b-c9bzn\" (UID: \"dd5122d9-c098-49cf-9723-bc0c31c6ce3b\") " pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-c9bzn" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.170691 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8n5s7\" (UniqueName: \"kubernetes.io/projected/50a25e99-d2ec-4b16-a5fa-894e79ee528e-kube-api-access-8n5s7\") pod \"infra-operator-controller-manager-8678f847b6-vpnkk\" (UID: \"50a25e99-d2ec-4b16-a5fa-894e79ee528e\") " pod="openstack-operators/infra-operator-controller-manager-8678f847b6-vpnkk" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.179088 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-hd7dj" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.187246 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-59578bc799-9d9sm"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.189360 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-59578bc799-9d9sm" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.191646 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-ldwp7" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.193570 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5777b4f897-zdcb9"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.242564 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q75vp\" (UniqueName: \"kubernetes.io/projected/b5b061bd-7d85-4960-a956-95c7911591a2-kube-api-access-q75vp\") pod \"cinder-operator-controller-manager-59cdc64769-xtlln\" (UID: \"b5b061bd-7d85-4960-a956-95c7911591a2\") " pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-xtlln" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.243801 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-zdcb9" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.244302 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-ddb98f99b-989mj"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.250487 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d94vj\" (UniqueName: \"kubernetes.io/projected/75692ce0-1ecb-4db6-a831-5740382b17e2-kube-api-access-d94vj\") pod \"designate-operator-controller-manager-687df44cdb-g8m5f\" (UID: \"75692ce0-1ecb-4db6-a831-5740382b17e2\") " pod="openstack-operators/designate-operator-controller-manager-687df44cdb-g8m5f" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.259921 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-mlpxh" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.261771 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7f95k\" (UniqueName: \"kubernetes.io/projected/cca7099f-c5ef-4109-91f5-b6831d0771e8-kube-api-access-7f95k\") pod \"barbican-operator-controller-manager-64f84fcdbb-gbn6q\" (UID: \"cca7099f-c5ef-4109-91f5-b6831d0771e8\") " pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-gbn6q" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.262509 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gfcvz\" (UniqueName: \"kubernetes.io/projected/d725cd21-efdc-4182-be84-460db3042d11-kube-api-access-gfcvz\") pod \"heat-operator-controller-manager-6d9967f8dd-prncj\" (UID: \"d725cd21-efdc-4182-be84-460db3042d11\") " pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-prncj" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.264355 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cm5cr\" (UniqueName: \"kubernetes.io/projected/99b28924-6f7a-4232-8fd5-b245178ce2ea-kube-api-access-cm5cr\") pod \"glance-operator-controller-manager-7bb46cd7d-t88s8\" (UID: \"99b28924-6f7a-4232-8fd5-b245178ce2ea\") " pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-t88s8" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.269384 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-gbn6q" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.273607 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/50a25e99-d2ec-4b16-a5fa-894e79ee528e-cert\") pod \"infra-operator-controller-manager-8678f847b6-vpnkk\" (UID: \"50a25e99-d2ec-4b16-a5fa-894e79ee528e\") " pod="openstack-operators/infra-operator-controller-manager-8678f847b6-vpnkk" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.273652 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58q2b\" (UniqueName: \"kubernetes.io/projected/205c5753-c94e-4bf4-993f-36b798bb489d-kube-api-access-58q2b\") pod \"ironic-operator-controller-manager-74cb5cbc49-fwr7j\" (UID: \"205c5753-c94e-4bf4-993f-36b798bb489d\") " pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-fwr7j" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.273699 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgnkd\" (UniqueName: \"kubernetes.io/projected/519c3c0c-07ee-4f48-ba92-d202190d9a49-kube-api-access-rgnkd\") pod \"mariadb-operator-controller-manager-5777b4f897-zdcb9\" (UID: \"519c3c0c-07ee-4f48-ba92-d202190d9a49\") " pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-zdcb9" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.273783 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r276h\" (UniqueName: \"kubernetes.io/projected/5901a38b-902a-4822-8483-9d478e61aa40-kube-api-access-r276h\") pod \"manila-operator-controller-manager-59578bc799-9d9sm\" (UID: \"5901a38b-902a-4822-8483-9d478e61aa40\") " pod="openstack-operators/manila-operator-controller-manager-59578bc799-9d9sm" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.273837 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fqjw\" (UniqueName: \"kubernetes.io/projected/b6b16d99-7f05-464a-a338-dcded4fa42fa-kube-api-access-2fqjw\") pod \"keystone-operator-controller-manager-ddb98f99b-989mj\" (UID: \"b6b16d99-7f05-464a-a338-dcded4fa42fa\") " pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-989mj" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.273870 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2brsd\" (UniqueName: \"kubernetes.io/projected/dd5122d9-c098-49cf-9723-bc0c31c6ce3b-kube-api-access-2brsd\") pod \"horizon-operator-controller-manager-6d74794d9b-c9bzn\" (UID: \"dd5122d9-c098-49cf-9723-bc0c31c6ce3b\") " pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-c9bzn" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.273893 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8n5s7\" (UniqueName: \"kubernetes.io/projected/50a25e99-d2ec-4b16-a5fa-894e79ee528e-kube-api-access-8n5s7\") pod \"infra-operator-controller-manager-8678f847b6-vpnkk\" (UID: \"50a25e99-d2ec-4b16-a5fa-894e79ee528e\") " pod="openstack-operators/infra-operator-controller-manager-8678f847b6-vpnkk" Oct 11 05:05:10 crc kubenswrapper[4651]: E1011 05:05:10.274191 4651 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found 
Oct 11 05:05:10 crc kubenswrapper[4651]: E1011 05:05:10.274245 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/50a25e99-d2ec-4b16-a5fa-894e79ee528e-cert podName:50a25e99-d2ec-4b16-a5fa-894e79ee528e nodeName:}" failed. No retries permitted until 2025-10-11 05:05:10.774225621 +0000 UTC m=+831.670458417 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/50a25e99-d2ec-4b16-a5fa-894e79ee528e-cert") pod "infra-operator-controller-manager-8678f847b6-vpnkk" (UID: "50a25e99-d2ec-4b16-a5fa-894e79ee528e") : secret "infra-operator-webhook-server-cert" not found Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.274812 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-59578bc799-9d9sm"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.287358 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-xtlln" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.313313 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8n5s7\" (UniqueName: \"kubernetes.io/projected/50a25e99-d2ec-4b16-a5fa-894e79ee528e-kube-api-access-8n5s7\") pod \"infra-operator-controller-manager-8678f847b6-vpnkk\" (UID: \"50a25e99-d2ec-4b16-a5fa-894e79ee528e\") " pod="openstack-operators/infra-operator-controller-manager-8678f847b6-vpnkk" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.313990 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5777b4f897-zdcb9"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.316894 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58q2b\" (UniqueName: \"kubernetes.io/projected/205c5753-c94e-4bf4-993f-36b798bb489d-kube-api-access-58q2b\") pod \"ironic-operator-controller-manager-74cb5cbc49-fwr7j\" (UID: \"205c5753-c94e-4bf4-993f-36b798bb489d\") " pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-fwr7j" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.322627 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-g8m5f" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.322868 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2brsd\" (UniqueName: \"kubernetes.io/projected/dd5122d9-c098-49cf-9723-bc0c31c6ce3b-kube-api-access-2brsd\") pod \"horizon-operator-controller-manager-6d74794d9b-c9bzn\" (UID: \"dd5122d9-c098-49cf-9723-bc0c31c6ce3b\") " pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-c9bzn" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.331274 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-797d478b46-xgxfd"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.332844 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-xgxfd" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.336587 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-lwfqq" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.336756 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-t88s8" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.355038 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-57bb74c7bf-7mhch"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.356065 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-7mhch" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.358609 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-dr8fk" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.375014 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r276h\" (UniqueName: \"kubernetes.io/projected/5901a38b-902a-4822-8483-9d478e61aa40-kube-api-access-r276h\") pod \"manila-operator-controller-manager-59578bc799-9d9sm\" (UID: \"5901a38b-902a-4822-8483-9d478e61aa40\") " pod="openstack-operators/manila-operator-controller-manager-59578bc799-9d9sm" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.375059 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fqjw\" (UniqueName: \"kubernetes.io/projected/b6b16d99-7f05-464a-a338-dcded4fa42fa-kube-api-access-2fqjw\") pod \"keystone-operator-controller-manager-ddb98f99b-989mj\" (UID: \"b6b16d99-7f05-464a-a338-dcded4fa42fa\") " pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-989mj" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.375111 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgnkd\" (UniqueName: \"kubernetes.io/projected/519c3c0c-07ee-4f48-ba92-d202190d9a49-kube-api-access-rgnkd\") pod \"mariadb-operator-controller-manager-5777b4f897-zdcb9\" (UID: \"519c3c0c-07ee-4f48-ba92-d202190d9a49\") " pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-zdcb9" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.377604 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-27r6g"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.379282 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-27r6g" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.383716 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-57bb74c7bf-7mhch"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.387960 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-czg54" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.392976 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-27r6g"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.393162 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-797d478b46-xgxfd"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.393268 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgnkd\" (UniqueName: \"kubernetes.io/projected/519c3c0c-07ee-4f48-ba92-d202190d9a49-kube-api-access-rgnkd\") pod \"mariadb-operator-controller-manager-5777b4f897-zdcb9\" (UID: \"519c3c0c-07ee-4f48-ba92-d202190d9a49\") " pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-zdcb9" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.395783 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-869cc7797f-6m2qz"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.395875 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r276h\" (UniqueName: \"kubernetes.io/projected/5901a38b-902a-4822-8483-9d478e61aa40-kube-api-access-r276h\") pod \"manila-operator-controller-manager-59578bc799-9d9sm\" (UID: \"5901a38b-902a-4822-8483-9d478e61aa40\") " pod="openstack-operators/manila-operator-controller-manager-59578bc799-9d9sm" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.397047 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-6m2qz" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.399541 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fqjw\" (UniqueName: \"kubernetes.io/projected/b6b16d99-7f05-464a-a338-dcded4fa42fa-kube-api-access-2fqjw\") pod \"keystone-operator-controller-manager-ddb98f99b-989mj\" (UID: \"b6b16d99-7f05-464a-a338-dcded4fa42fa\") " pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-989mj" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.399694 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-fj7z6" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.400911 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dxbxc6"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.401965 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dxbxc6" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.403967 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.404046 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-hqpp9" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.405954 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-869cc7797f-6m2qz"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.410564 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-prncj" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.412100 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-664664cb68-5s2zz"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.413428 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-664664cb68-5s2zz" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.419738 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-vmqd8"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.426490 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dxbxc6"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.426863 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-c9bzn" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.427699 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-vmqd8" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.431575 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-dlgwq" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.431603 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-664664cb68-5s2zz"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.449580 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-vmqd8"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.457702 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-fwr7j" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.457973 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-578874c84d-x9x8w"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.458663 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-2nmvz" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.459071 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-x9x8w" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.460761 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-fdt8x" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.463775 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-578874c84d-x9x8w"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.476544 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5c8cx\" (UniqueName: \"kubernetes.io/projected/fcc9418a-3e9d-4c74-849d-b9884077820c-kube-api-access-5c8cx\") pod \"neutron-operator-controller-manager-797d478b46-xgxfd\" (UID: \"fcc9418a-3e9d-4c74-849d-b9884077820c\") " pod="openstack-operators/neutron-operator-controller-manager-797d478b46-xgxfd" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.476641 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ts24\" (UniqueName: \"kubernetes.io/projected/321665c4-bc9a-47e0-a6c4-a54d56ad5ce8-kube-api-access-2ts24\") pod \"nova-operator-controller-manager-57bb74c7bf-7mhch\" (UID: \"321665c4-bc9a-47e0-a6c4-a54d56ad5ce8\") " pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-7mhch" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.481178 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-ffcdd6c94-p9d4d"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.482325 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-p9d4d" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.487342 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-ffcdd6c94-p9d4d"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.488267 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-nr6ft" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.544204 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-989mj" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.591766 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ts24\" (UniqueName: \"kubernetes.io/projected/321665c4-bc9a-47e0-a6c4-a54d56ad5ce8-kube-api-access-2ts24\") pod \"nova-operator-controller-manager-57bb74c7bf-7mhch\" (UID: \"321665c4-bc9a-47e0-a6c4-a54d56ad5ce8\") " pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-7mhch" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.591827 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9105b503-cbba-48d1-acdb-ac21b7c791b4-cert\") pod \"openstack-baremetal-operator-controller-manager-6cc7fb757dxbxc6\" (UID: \"9105b503-cbba-48d1-acdb-ac21b7c791b4\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dxbxc6" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.591875 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p7txb\" (UniqueName: \"kubernetes.io/projected/9105b503-cbba-48d1-acdb-ac21b7c791b4-kube-api-access-p7txb\") pod \"openstack-baremetal-operator-controller-manager-6cc7fb757dxbxc6\" (UID: \"9105b503-cbba-48d1-acdb-ac21b7c791b4\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dxbxc6" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.591907 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x29qh\" (UniqueName: \"kubernetes.io/projected/f4175bba-9aae-4faf-8670-f612f867827e-kube-api-access-x29qh\") pod \"swift-operator-controller-manager-5f4d5dfdc6-vmqd8\" (UID: \"f4175bba-9aae-4faf-8670-f612f867827e\") " pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-vmqd8" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.591941 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5c8cx\" (UniqueName: \"kubernetes.io/projected/fcc9418a-3e9d-4c74-849d-b9884077820c-kube-api-access-5c8cx\") pod \"neutron-operator-controller-manager-797d478b46-xgxfd\" (UID: \"fcc9418a-3e9d-4c74-849d-b9884077820c\") " pod="openstack-operators/neutron-operator-controller-manager-797d478b46-xgxfd" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.591974 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2tf5\" (UniqueName: \"kubernetes.io/projected/0e634116-91fc-4fad-b906-a998e77ea3e4-kube-api-access-g2tf5\") pod \"test-operator-controller-manager-ffcdd6c94-p9d4d\" (UID: \"0e634116-91fc-4fad-b906-a998e77ea3e4\") " pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-p9d4d" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.592007 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qf4z9\" (UniqueName: \"kubernetes.io/projected/4aede6f1-d9d0-4c62-b118-7c93fa2af789-kube-api-access-qf4z9\") pod \"octavia-operator-controller-manager-6d7c7ddf95-27r6g\" (UID: \"4aede6f1-d9d0-4c62-b118-7c93fa2af789\") " pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-27r6g" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.592033 4651 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frhct\" (UniqueName: \"kubernetes.io/projected/555de60b-f68b-42a6-a662-d1e5202a30c5-kube-api-access-frhct\") pod \"telemetry-operator-controller-manager-578874c84d-x9x8w\" (UID: \"555de60b-f68b-42a6-a662-d1e5202a30c5\") " pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-x9x8w" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.592058 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xgsc\" (UniqueName: \"kubernetes.io/projected/160294e4-b990-41b3-8f6c-22102366d72c-kube-api-access-5xgsc\") pod \"placement-operator-controller-manager-664664cb68-5s2zz\" (UID: \"160294e4-b990-41b3-8f6c-22102366d72c\") " pod="openstack-operators/placement-operator-controller-manager-664664cb68-5s2zz" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.592076 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dkzlm\" (UniqueName: \"kubernetes.io/projected/c9dabf8d-2991-4af0-99c8-084e157e9b52-kube-api-access-dkzlm\") pod \"ovn-operator-controller-manager-869cc7797f-6m2qz\" (UID: \"c9dabf8d-2991-4af0-99c8-084e157e9b52\") " pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-6m2qz" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.622016 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ts24\" (UniqueName: \"kubernetes.io/projected/321665c4-bc9a-47e0-a6c4-a54d56ad5ce8-kube-api-access-2ts24\") pod \"nova-operator-controller-manager-57bb74c7bf-7mhch\" (UID: \"321665c4-bc9a-47e0-a6c4-a54d56ad5ce8\") " pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-7mhch" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.649616 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5c8cx\" (UniqueName: \"kubernetes.io/projected/fcc9418a-3e9d-4c74-849d-b9884077820c-kube-api-access-5c8cx\") pod \"neutron-operator-controller-manager-797d478b46-xgxfd\" (UID: \"fcc9418a-3e9d-4c74-849d-b9884077820c\") " pod="openstack-operators/neutron-operator-controller-manager-797d478b46-xgxfd" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.660616 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-59578bc799-9d9sm" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.676042 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-zdcb9" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.718335 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-xgxfd" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.719553 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dkzlm\" (UniqueName: \"kubernetes.io/projected/c9dabf8d-2991-4af0-99c8-084e157e9b52-kube-api-access-dkzlm\") pod \"ovn-operator-controller-manager-869cc7797f-6m2qz\" (UID: \"c9dabf8d-2991-4af0-99c8-084e157e9b52\") " pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-6m2qz" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.719629 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9105b503-cbba-48d1-acdb-ac21b7c791b4-cert\") pod \"openstack-baremetal-operator-controller-manager-6cc7fb757dxbxc6\" (UID: \"9105b503-cbba-48d1-acdb-ac21b7c791b4\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dxbxc6" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.719690 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p7txb\" (UniqueName: \"kubernetes.io/projected/9105b503-cbba-48d1-acdb-ac21b7c791b4-kube-api-access-p7txb\") pod \"openstack-baremetal-operator-controller-manager-6cc7fb757dxbxc6\" (UID: \"9105b503-cbba-48d1-acdb-ac21b7c791b4\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dxbxc6" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.719723 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x29qh\" (UniqueName: \"kubernetes.io/projected/f4175bba-9aae-4faf-8670-f612f867827e-kube-api-access-x29qh\") pod \"swift-operator-controller-manager-5f4d5dfdc6-vmqd8\" (UID: \"f4175bba-9aae-4faf-8670-f612f867827e\") " pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-vmqd8" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.719780 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2tf5\" (UniqueName: \"kubernetes.io/projected/0e634116-91fc-4fad-b906-a998e77ea3e4-kube-api-access-g2tf5\") pod \"test-operator-controller-manager-ffcdd6c94-p9d4d\" (UID: \"0e634116-91fc-4fad-b906-a998e77ea3e4\") " pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-p9d4d" Oct 11 05:05:10 crc kubenswrapper[4651]: E1011 05:05:10.719918 4651 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 11 05:05:10 crc kubenswrapper[4651]: E1011 05:05:10.720222 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9105b503-cbba-48d1-acdb-ac21b7c791b4-cert podName:9105b503-cbba-48d1-acdb-ac21b7c791b4 nodeName:}" failed. No retries permitted until 2025-10-11 05:05:11.219992896 +0000 UTC m=+832.116225692 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/9105b503-cbba-48d1-acdb-ac21b7c791b4-cert") pod "openstack-baremetal-operator-controller-manager-6cc7fb757dxbxc6" (UID: "9105b503-cbba-48d1-acdb-ac21b7c791b4") : secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.720408 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-7mhch" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.723916 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qf4z9\" (UniqueName: \"kubernetes.io/projected/4aede6f1-d9d0-4c62-b118-7c93fa2af789-kube-api-access-qf4z9\") pod \"octavia-operator-controller-manager-6d7c7ddf95-27r6g\" (UID: \"4aede6f1-d9d0-4c62-b118-7c93fa2af789\") " pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-27r6g" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.723962 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frhct\" (UniqueName: \"kubernetes.io/projected/555de60b-f68b-42a6-a662-d1e5202a30c5-kube-api-access-frhct\") pod \"telemetry-operator-controller-manager-578874c84d-x9x8w\" (UID: \"555de60b-f68b-42a6-a662-d1e5202a30c5\") " pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-x9x8w" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.724000 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xgsc\" (UniqueName: \"kubernetes.io/projected/160294e4-b990-41b3-8f6c-22102366d72c-kube-api-access-5xgsc\") pod \"placement-operator-controller-manager-664664cb68-5s2zz\" (UID: \"160294e4-b990-41b3-8f6c-22102366d72c\") " pod="openstack-operators/placement-operator-controller-manager-664664cb68-5s2zz" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.772024 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dkzlm\" (UniqueName: \"kubernetes.io/projected/c9dabf8d-2991-4af0-99c8-084e157e9b52-kube-api-access-dkzlm\") pod \"ovn-operator-controller-manager-869cc7797f-6m2qz\" (UID: \"c9dabf8d-2991-4af0-99c8-084e157e9b52\") " pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-6m2qz" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.791165 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-646675d848-7w8pk"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.791402 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-6m2qz" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.813100 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-646675d848-7w8pk" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.825360 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/50a25e99-d2ec-4b16-a5fa-894e79ee528e-cert\") pod \"infra-operator-controller-manager-8678f847b6-vpnkk\" (UID: \"50a25e99-d2ec-4b16-a5fa-894e79ee528e\") " pod="openstack-operators/infra-operator-controller-manager-8678f847b6-vpnkk" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.825469 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdgxm\" (UniqueName: \"kubernetes.io/projected/9bfc9d92-8b6d-4b13-9759-b7185e1f16bb-kube-api-access-bdgxm\") pod \"watcher-operator-controller-manager-646675d848-7w8pk\" (UID: \"9bfc9d92-8b6d-4b13-9759-b7185e1f16bb\") " pod="openstack-operators/watcher-operator-controller-manager-646675d848-7w8pk" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.836614 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-t8f9q" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.845240 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/50a25e99-d2ec-4b16-a5fa-894e79ee528e-cert\") pod \"infra-operator-controller-manager-8678f847b6-vpnkk\" (UID: \"50a25e99-d2ec-4b16-a5fa-894e79ee528e\") " pod="openstack-operators/infra-operator-controller-manager-8678f847b6-vpnkk" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.845510 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p7txb\" (UniqueName: \"kubernetes.io/projected/9105b503-cbba-48d1-acdb-ac21b7c791b4-kube-api-access-p7txb\") pod \"openstack-baremetal-operator-controller-manager-6cc7fb757dxbxc6\" (UID: \"9105b503-cbba-48d1-acdb-ac21b7c791b4\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dxbxc6" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.849460 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qf4z9\" (UniqueName: \"kubernetes.io/projected/4aede6f1-d9d0-4c62-b118-7c93fa2af789-kube-api-access-qf4z9\") pod \"octavia-operator-controller-manager-6d7c7ddf95-27r6g\" (UID: \"4aede6f1-d9d0-4c62-b118-7c93fa2af789\") " pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-27r6g" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.849628 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x29qh\" (UniqueName: \"kubernetes.io/projected/f4175bba-9aae-4faf-8670-f612f867827e-kube-api-access-x29qh\") pod \"swift-operator-controller-manager-5f4d5dfdc6-vmqd8\" (UID: \"f4175bba-9aae-4faf-8670-f612f867827e\") " pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-vmqd8" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.854483 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g2tf5\" (UniqueName: \"kubernetes.io/projected/0e634116-91fc-4fad-b906-a998e77ea3e4-kube-api-access-g2tf5\") pod \"test-operator-controller-manager-ffcdd6c94-p9d4d\" (UID: \"0e634116-91fc-4fad-b906-a998e77ea3e4\") " pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-p9d4d" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.854577 4651 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-5xgsc\" (UniqueName: \"kubernetes.io/projected/160294e4-b990-41b3-8f6c-22102366d72c-kube-api-access-5xgsc\") pod \"placement-operator-controller-manager-664664cb68-5s2zz\" (UID: \"160294e4-b990-41b3-8f6c-22102366d72c\") " pod="openstack-operators/placement-operator-controller-manager-664664cb68-5s2zz" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.858176 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frhct\" (UniqueName: \"kubernetes.io/projected/555de60b-f68b-42a6-a662-d1e5202a30c5-kube-api-access-frhct\") pod \"telemetry-operator-controller-manager-578874c84d-x9x8w\" (UID: \"555de60b-f68b-42a6-a662-d1e5202a30c5\") " pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-x9x8w" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.865312 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-646675d848-7w8pk"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.882764 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-776b77588d-7z9rb"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.888301 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-776b77588d-7z9rb" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.893408 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-z5qj2" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.893615 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.896880 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-776b77588d-7z9rb"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.901627 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-vmqd8" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.910873 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-dfqdz"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.911776 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-dfqdz" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.917120 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-9zq6n" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.925997 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-dfqdz"] Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.928083 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdgxm\" (UniqueName: \"kubernetes.io/projected/9bfc9d92-8b6d-4b13-9759-b7185e1f16bb-kube-api-access-bdgxm\") pod \"watcher-operator-controller-manager-646675d848-7w8pk\" (UID: \"9bfc9d92-8b6d-4b13-9759-b7185e1f16bb\") " pod="openstack-operators/watcher-operator-controller-manager-646675d848-7w8pk" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.928202 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56gsv\" (UniqueName: \"kubernetes.io/projected/bd36db45-dfcf-4d27-8bfb-fcefeff7f0ba-kube-api-access-56gsv\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-dfqdz\" (UID: \"bd36db45-dfcf-4d27-8bfb-fcefeff7f0ba\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-dfqdz" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.928234 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7bcc02c2-c9c4-498e-8f95-ace0d1b98899-cert\") pod \"openstack-operator-controller-manager-776b77588d-7z9rb\" (UID: \"7bcc02c2-c9c4-498e-8f95-ace0d1b98899\") " pod="openstack-operators/openstack-operator-controller-manager-776b77588d-7z9rb" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.928305 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjz7z\" (UniqueName: \"kubernetes.io/projected/7bcc02c2-c9c4-498e-8f95-ace0d1b98899-kube-api-access-kjz7z\") pod \"openstack-operator-controller-manager-776b77588d-7z9rb\" (UID: \"7bcc02c2-c9c4-498e-8f95-ace0d1b98899\") " pod="openstack-operators/openstack-operator-controller-manager-776b77588d-7z9rb" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.957925 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdgxm\" (UniqueName: \"kubernetes.io/projected/9bfc9d92-8b6d-4b13-9759-b7185e1f16bb-kube-api-access-bdgxm\") pod \"watcher-operator-controller-manager-646675d848-7w8pk\" (UID: \"9bfc9d92-8b6d-4b13-9759-b7185e1f16bb\") " pod="openstack-operators/watcher-operator-controller-manager-646675d848-7w8pk" Oct 11 05:05:10 crc kubenswrapper[4651]: I1011 05:05:10.967131 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-59cdc64769-xtlln"] Oct 11 05:05:10 crc kubenswrapper[4651]: W1011 05:05:10.990976 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb5b061bd_7d85_4960_a956_95c7911591a2.slice/crio-8660b6e7fba7699dd111ee457e3ff05a2acba935c40211dfae4aa04bbb8280cc WatchSource:0}: Error finding container 8660b6e7fba7699dd111ee457e3ff05a2acba935c40211dfae4aa04bbb8280cc: Status 404 returned error can't find the container with id 
8660b6e7fba7699dd111ee457e3ff05a2acba935c40211dfae4aa04bbb8280cc Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.000810 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-64f84fcdbb-gbn6q"] Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.007416 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-7bb46cd7d-t88s8"] Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.012688 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-x9x8w" Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.029504 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56gsv\" (UniqueName: \"kubernetes.io/projected/bd36db45-dfcf-4d27-8bfb-fcefeff7f0ba-kube-api-access-56gsv\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-dfqdz\" (UID: \"bd36db45-dfcf-4d27-8bfb-fcefeff7f0ba\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-dfqdz" Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.029567 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7bcc02c2-c9c4-498e-8f95-ace0d1b98899-cert\") pod \"openstack-operator-controller-manager-776b77588d-7z9rb\" (UID: \"7bcc02c2-c9c4-498e-8f95-ace0d1b98899\") " pod="openstack-operators/openstack-operator-controller-manager-776b77588d-7z9rb" Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.029611 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjz7z\" (UniqueName: \"kubernetes.io/projected/7bcc02c2-c9c4-498e-8f95-ace0d1b98899-kube-api-access-kjz7z\") pod \"openstack-operator-controller-manager-776b77588d-7z9rb\" (UID: \"7bcc02c2-c9c4-498e-8f95-ace0d1b98899\") " pod="openstack-operators/openstack-operator-controller-manager-776b77588d-7z9rb" Oct 11 05:05:11 crc kubenswrapper[4651]: E1011 05:05:11.030224 4651 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Oct 11 05:05:11 crc kubenswrapper[4651]: E1011 05:05:11.030304 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7bcc02c2-c9c4-498e-8f95-ace0d1b98899-cert podName:7bcc02c2-c9c4-498e-8f95-ace0d1b98899 nodeName:}" failed. No retries permitted until 2025-10-11 05:05:11.530286282 +0000 UTC m=+832.426519078 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/7bcc02c2-c9c4-498e-8f95-ace0d1b98899-cert") pod "openstack-operator-controller-manager-776b77588d-7z9rb" (UID: "7bcc02c2-c9c4-498e-8f95-ace0d1b98899") : secret "webhook-server-cert" not found Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.043186 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-8678f847b6-vpnkk" Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.054793 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56gsv\" (UniqueName: \"kubernetes.io/projected/bd36db45-dfcf-4d27-8bfb-fcefeff7f0ba-kube-api-access-56gsv\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-dfqdz\" (UID: \"bd36db45-dfcf-4d27-8bfb-fcefeff7f0ba\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-dfqdz" Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.057145 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kjz7z\" (UniqueName: \"kubernetes.io/projected/7bcc02c2-c9c4-498e-8f95-ace0d1b98899-kube-api-access-kjz7z\") pod \"openstack-operator-controller-manager-776b77588d-7z9rb\" (UID: \"7bcc02c2-c9c4-498e-8f95-ace0d1b98899\") " pod="openstack-operators/openstack-operator-controller-manager-776b77588d-7z9rb" Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.067595 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-27r6g" Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.077580 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-p9d4d" Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.144758 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-664664cb68-5s2zz" Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.160942 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-646675d848-7w8pk" Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.162926 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-687df44cdb-g8m5f"] Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.237860 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9105b503-cbba-48d1-acdb-ac21b7c791b4-cert\") pod \"openstack-baremetal-operator-controller-manager-6cc7fb757dxbxc6\" (UID: \"9105b503-cbba-48d1-acdb-ac21b7c791b4\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dxbxc6" Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.245094 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9105b503-cbba-48d1-acdb-ac21b7c791b4-cert\") pod \"openstack-baremetal-operator-controller-manager-6cc7fb757dxbxc6\" (UID: \"9105b503-cbba-48d1-acdb-ac21b7c791b4\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dxbxc6" Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.264867 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-dfqdz" Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.318191 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-6d9967f8dd-prncj"] Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.403465 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dxbxc6" Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.475254 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-74cb5cbc49-fwr7j"] Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.484453 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-ddb98f99b-989mj"] Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.488957 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6d74794d9b-c9bzn"] Oct 11 05:05:11 crc kubenswrapper[4651]: W1011 05:05:11.502252 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod205c5753_c94e_4bf4_993f_36b798bb489d.slice/crio-f1f59f07fad34e14e67033db65940fc7c8c6e70cebe6a505d5063212223dc4db WatchSource:0}: Error finding container f1f59f07fad34e14e67033db65940fc7c8c6e70cebe6a505d5063212223dc4db: Status 404 returned error can't find the container with id f1f59f07fad34e14e67033db65940fc7c8c6e70cebe6a505d5063212223dc4db Oct 11 05:05:11 crc kubenswrapper[4651]: W1011 05:05:11.503559 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddd5122d9_c098_49cf_9723_bc0c31c6ce3b.slice/crio-50a1d6460c53603579b13a3560d70acd75c698efd93d75b7eb6d88cd1bb466d1 WatchSource:0}: Error finding container 50a1d6460c53603579b13a3560d70acd75c698efd93d75b7eb6d88cd1bb466d1: Status 404 returned error can't find the container with id 50a1d6460c53603579b13a3560d70acd75c698efd93d75b7eb6d88cd1bb466d1 Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.552578 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7bcc02c2-c9c4-498e-8f95-ace0d1b98899-cert\") pod \"openstack-operator-controller-manager-776b77588d-7z9rb\" (UID: \"7bcc02c2-c9c4-498e-8f95-ace0d1b98899\") " pod="openstack-operators/openstack-operator-controller-manager-776b77588d-7z9rb" Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.555888 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7bcc02c2-c9c4-498e-8f95-ace0d1b98899-cert\") pod \"openstack-operator-controller-manager-776b77588d-7z9rb\" (UID: \"7bcc02c2-c9c4-498e-8f95-ace0d1b98899\") " pod="openstack-operators/openstack-operator-controller-manager-776b77588d-7z9rb" Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.662255 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-59578bc799-9d9sm"] Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.667710 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-57bb74c7bf-7mhch"] Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.677375 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5777b4f897-zdcb9"] Oct 11 05:05:11 crc kubenswrapper[4651]: W1011 05:05:11.679851 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod321665c4_bc9a_47e0_a6c4_a54d56ad5ce8.slice/crio-5c5341202f06e5fc8b4f05f671c6a371924ac61b19cbe5b8ed287d5f76ce5e71 WatchSource:0}: Error 
finding container 5c5341202f06e5fc8b4f05f671c6a371924ac61b19cbe5b8ed287d5f76ce5e71: Status 404 returned error can't find the container with id 5c5341202f06e5fc8b4f05f671c6a371924ac61b19cbe5b8ed287d5f76ce5e71 Oct 11 05:05:11 crc kubenswrapper[4651]: W1011 05:05:11.686293 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod519c3c0c_07ee_4f48_ba92_d202190d9a49.slice/crio-cf781331b418b8714a503aa7b69e637f5bf8d578628ecc3ac86abf4a393043bb WatchSource:0}: Error finding container cf781331b418b8714a503aa7b69e637f5bf8d578628ecc3ac86abf4a393043bb: Status 404 returned error can't find the container with id cf781331b418b8714a503aa7b69e637f5bf8d578628ecc3ac86abf4a393043bb Oct 11 05:05:11 crc kubenswrapper[4651]: W1011 05:05:11.688713 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5901a38b_902a_4822_8483_9d478e61aa40.slice/crio-be806412364e2fa79361450ddcc9ed6565e97cf11dc56edcfe2a8a0ed8589a56 WatchSource:0}: Error finding container be806412364e2fa79361450ddcc9ed6565e97cf11dc56edcfe2a8a0ed8589a56: Status 404 returned error can't find the container with id be806412364e2fa79361450ddcc9ed6565e97cf11dc56edcfe2a8a0ed8589a56 Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.846635 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-776b77588d-7z9rb" Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.898860 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-27r6g"] Oct 11 05:05:11 crc kubenswrapper[4651]: W1011 05:05:11.903758 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4aede6f1_d9d0_4c62_b118_7c93fa2af789.slice/crio-91d9cadaab12c4a7e6e2a58fa6bb317143d534759b18db087475b30804102c26 WatchSource:0}: Error finding container 91d9cadaab12c4a7e6e2a58fa6bb317143d534759b18db087475b30804102c26: Status 404 returned error can't find the container with id 91d9cadaab12c4a7e6e2a58fa6bb317143d534759b18db087475b30804102c26 Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.978646 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-prncj" event={"ID":"d725cd21-efdc-4182-be84-460db3042d11","Type":"ContainerStarted","Data":"bc4ef844e8764d1d4d74f001602c25398a7332c3aadc6f8daeea15701dc8281d"} Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.979911 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-t88s8" event={"ID":"99b28924-6f7a-4232-8fd5-b245178ce2ea","Type":"ContainerStarted","Data":"5cd2a26d7a6a29692eeeca5c45e519a5c29e73855351c7d9f6516f20535a5e91"} Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.980768 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-gbn6q" event={"ID":"cca7099f-c5ef-4109-91f5-b6831d0771e8","Type":"ContainerStarted","Data":"e2a5960913f3ee84002252c78086b7c385b1b59d252b7cbb1a4efcdc0355e123"} Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.981492 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-989mj" 
event={"ID":"b6b16d99-7f05-464a-a338-dcded4fa42fa","Type":"ContainerStarted","Data":"462a3278907c6ceead64acec395c5900429075db850fefca9a4ec0cbc2f411e9"} Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.982319 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-g8m5f" event={"ID":"75692ce0-1ecb-4db6-a831-5740382b17e2","Type":"ContainerStarted","Data":"3f2ef003b7c20fc9fb1e9e8cfe82f13e5f9de62a52b6750c17c1c4b68bdf3b71"} Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.983110 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-27r6g" event={"ID":"4aede6f1-d9d0-4c62-b118-7c93fa2af789","Type":"ContainerStarted","Data":"91d9cadaab12c4a7e6e2a58fa6bb317143d534759b18db087475b30804102c26"} Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.984073 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-7mhch" event={"ID":"321665c4-bc9a-47e0-a6c4-a54d56ad5ce8","Type":"ContainerStarted","Data":"5c5341202f06e5fc8b4f05f671c6a371924ac61b19cbe5b8ed287d5f76ce5e71"} Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.985254 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-c9bzn" event={"ID":"dd5122d9-c098-49cf-9723-bc0c31c6ce3b","Type":"ContainerStarted","Data":"50a1d6460c53603579b13a3560d70acd75c698efd93d75b7eb6d88cd1bb466d1"} Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.986042 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-fwr7j" event={"ID":"205c5753-c94e-4bf4-993f-36b798bb489d","Type":"ContainerStarted","Data":"f1f59f07fad34e14e67033db65940fc7c8c6e70cebe6a505d5063212223dc4db"} Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.987091 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-xtlln" event={"ID":"b5b061bd-7d85-4960-a956-95c7911591a2","Type":"ContainerStarted","Data":"8660b6e7fba7699dd111ee457e3ff05a2acba935c40211dfae4aa04bbb8280cc"} Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.990340 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-zdcb9" event={"ID":"519c3c0c-07ee-4f48-ba92-d202190d9a49","Type":"ContainerStarted","Data":"cf781331b418b8714a503aa7b69e637f5bf8d578628ecc3ac86abf4a393043bb"} Oct 11 05:05:11 crc kubenswrapper[4651]: I1011 05:05:11.992499 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-59578bc799-9d9sm" event={"ID":"5901a38b-902a-4822-8483-9d478e61aa40","Type":"ContainerStarted","Data":"be806412364e2fa79361450ddcc9ed6565e97cf11dc56edcfe2a8a0ed8589a56"} Oct 11 05:05:12 crc kubenswrapper[4651]: I1011 05:05:12.118386 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-797d478b46-xgxfd"] Oct 11 05:05:12 crc kubenswrapper[4651]: I1011 05:05:12.134779 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-664664cb68-5s2zz"] Oct 11 05:05:12 crc kubenswrapper[4651]: E1011 05:05:12.153281 4651 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:4b4a17fe08ce00e375afaaec6a28835f5c1784f03d11c4558376ac04130f3a9e,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-x29qh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f4d5dfdc6-vmqd8_openstack-operators(f4175bba-9aae-4faf-8670-f612f867827e): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 11 05:05:12 crc kubenswrapper[4651]: I1011 05:05:12.154520 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-vmqd8"] Oct 11 05:05:12 crc kubenswrapper[4651]: I1011 05:05:12.161556 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-578874c84d-x9x8w"] Oct 11 05:05:12 crc kubenswrapper[4651]: E1011 05:05:12.162548 4651 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:a17fc270857869fd1efe5020b2a1cb8c2abbd838f08de88f3a6a59e8754ec351,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 
--leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-baremetal-operator-agent:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_ANSIBLEEE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_EVALUATOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-evaluator:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_NOTIFIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-notifier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_APACHE_IMAGE_URL_DEFAULT,Value:registry.redhat.io/ubi9/httpd-24:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_KEYSTONE_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-keystone-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_IPMI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-ipmi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_MYSQLD_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/mysqld-exporter:v0.15.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_NOTIFICATION_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-notification:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_SGCORE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/sg-core:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_BACKUP_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-backup:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_VOLUME_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-volume:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_BACKENDBIND9_IMAGE_URL_DEFAULT,Val
ue:quay.io/podified-antelope-centos9/openstack-designate-backend-bind9:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_MDNS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-mdns:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_PRODUCER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-producer:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_UNBOUND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-unbound:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_FRR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-frr:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_ISCSID_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-iscsid:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_KEPLER_IMAGE_URL_DEFAULT,Value:quay.io/sustainable_computing_io/kepler:release-0.7.12,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_LOGROTATE_CROND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cron:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_MULTIPATHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-multipathd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_DHCP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-dhcp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_METADATA_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_OVN_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-ovn-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_SRIOV_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-sriov-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NODE_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/node-exporter:v1.5.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_OVN_BGP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-bgp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_PODMAN_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/navidys/prometheus-podman-exporter:v1.10.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_GLANCE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_CFNAPI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api-cfn:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-engine:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HORIZON_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_MEMCACHED_IMAGE_URL_DEFAULT,Value:quay.
io/podified-antelope-centos9/openstack-memcached:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_REDIS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-redis:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_INSPECTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-inspector:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_NEUTRON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-neutron-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PXE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-pxe:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PYTHON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/ironic-python-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KEYSTONE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-keystone:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KSM_IMAGE_URL_DEFAULT,Value:registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SHARE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-share:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MARIADB_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NET_UTILS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-netutils:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NEUTRON_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_NOVNC_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-novncproxy:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HEALTHMANAGER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-health-manager:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HOUSEKEEPING_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/o
penstack-octavia-housekeeping:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_RSYSLOG_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rsyslog:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_CLIENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_MUST_GATHER_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-must-gather:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_NETWORK_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OS_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/edpm-hardened-uefi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_OVS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-base:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-nb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NORTHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-northd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_SB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PLACEMENT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_RABBITMQ_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_ACCOUNT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-account:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-container:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_OBJECT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-object:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_PROXY_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-proxy-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_TEST_TEMPEST_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_APPLIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-applier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_DECISION_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-decision-engine:current-podified,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-p7txb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-baremetal-operator-controller-manager-6cc7fb757dxbxc6_openstack-operators(9105b503-cbba-48d1-acdb-ac21b7c791b4): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 11 05:05:12 crc kubenswrapper[4651]: E1011 05:05:12.167755 4651 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:33652e75a03a058769019fe8d8c51585a6eeefef5e1ecb96f9965434117954f2,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5c8cx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-797d478b46-xgxfd_openstack-operators(fcc9418a-3e9d-4c74-849d-b9884077820c): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 11 05:05:12 crc kubenswrapper[4651]: W1011 05:05:12.174119 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0e634116_91fc_4fad_b906_a998e77ea3e4.slice/crio-443c3572a4c30ae8a859da87b55bd5fdb3439adeabbd9f4aefc071abce5c8f7d WatchSource:0}: Error finding container 443c3572a4c30ae8a859da87b55bd5fdb3439adeabbd9f4aefc071abce5c8f7d: Status 404 returned error can't find the container with id 443c3572a4c30ae8a859da87b55bd5fdb3439adeabbd9f4aefc071abce5c8f7d Oct 11 05:05:12 crc kubenswrapper[4651]: I1011 05:05:12.176937 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-869cc7797f-6m2qz"] Oct 11 05:05:12 crc kubenswrapper[4651]: W1011 05:05:12.188201 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbd36db45_dfcf_4d27_8bfb_fcefeff7f0ba.slice/crio-0a137596eaa16f92a12279f168d4dcdc805d063643fb1679d76fc0b4178fd32e WatchSource:0}: Error finding container 0a137596eaa16f92a12279f168d4dcdc805d063643fb1679d76fc0b4178fd32e: Status 404 returned error can't find the container with id 0a137596eaa16f92a12279f168d4dcdc805d063643fb1679d76fc0b4178fd32e Oct 11 05:05:12 crc kubenswrapper[4651]: I1011 05:05:12.189015 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-ffcdd6c94-p9d4d"] Oct 11 05:05:12 crc kubenswrapper[4651]: E1011 05:05:12.189263 4651 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:7e584b1c430441c8b6591dadeff32e065de8a185ad37ef90d2e08d37e59aab4a,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-g2tf5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-ffcdd6c94-p9d4d_openstack-operators(0e634116-91fc-4fad-b906-a998e77ea3e4): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 11 05:05:12 crc kubenswrapper[4651]: E1011 05:05:12.189363 4651 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.102.83.147:5001/openstack-k8s-operators/infra-operator:8ca4dd1cb823b5f481ee4cfa3c1c30f832a2ffb6,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} {} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8n5s7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod infra-operator-controller-manager-8678f847b6-vpnkk_openstack-operators(50a25e99-d2ec-4b16-a5fa-894e79ee528e): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 11 05:05:12 crc kubenswrapper[4651]: E1011 05:05:12.190760 4651 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:98a5233f0596591acdf2c6a5838b08be108787cdb6ad1995b2b7886bac0fe6ca,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bdgxm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-646675d848-7w8pk_openstack-operators(9bfc9d92-8b6d-4b13-9759-b7185e1f16bb): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 11 05:05:12 crc kubenswrapper[4651]: E1011 05:05:12.195232 4651 
kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-56gsv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-5f97d8c699-dfqdz_openstack-operators(bd36db45-dfcf-4d27-8bfb-fcefeff7f0ba): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 11 05:05:12 crc kubenswrapper[4651]: E1011 05:05:12.196409 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-dfqdz" podUID="bd36db45-dfcf-4d27-8bfb-fcefeff7f0ba" Oct 11 05:05:12 crc kubenswrapper[4651]: I1011 05:05:12.196799 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-8678f847b6-vpnkk"] Oct 11 05:05:12 crc kubenswrapper[4651]: I1011 05:05:12.203875 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dxbxc6"] Oct 11 05:05:12 crc kubenswrapper[4651]: I1011 05:05:12.207480 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-dfqdz"] Oct 11 05:05:12 crc kubenswrapper[4651]: I1011 05:05:12.211283 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-646675d848-7w8pk"] Oct 11 05:05:12 crc kubenswrapper[4651]: I1011 05:05:12.296907 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-776b77588d-7z9rb"] Oct 11 05:05:12 crc kubenswrapper[4651]: W1011 05:05:12.322031 4651 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7bcc02c2_c9c4_498e_8f95_ace0d1b98899.slice/crio-cdd3140ea66705faa190faa2c67ea64462ccbe0c759ca312fff03cb5cb1d3087 WatchSource:0}: Error finding container cdd3140ea66705faa190faa2c67ea64462ccbe0c759ca312fff03cb5cb1d3087: Status 404 returned error can't find the container with id cdd3140ea66705faa190faa2c67ea64462ccbe0c759ca312fff03cb5cb1d3087 Oct 11 05:05:12 crc kubenswrapper[4651]: E1011 05:05:12.586522 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-xgxfd" podUID="fcc9418a-3e9d-4c74-849d-b9884077820c" Oct 11 05:05:12 crc kubenswrapper[4651]: E1011 05:05:12.602784 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-vmqd8" podUID="f4175bba-9aae-4faf-8670-f612f867827e" Oct 11 05:05:12 crc kubenswrapper[4651]: E1011 05:05:12.624216 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-646675d848-7w8pk" podUID="9bfc9d92-8b6d-4b13-9759-b7185e1f16bb" Oct 11 05:05:12 crc kubenswrapper[4651]: E1011 05:05:12.650452 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-p9d4d" podUID="0e634116-91fc-4fad-b906-a998e77ea3e4" Oct 11 05:05:12 crc kubenswrapper[4651]: E1011 05:05:12.665164 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dxbxc6" podUID="9105b503-cbba-48d1-acdb-ac21b7c791b4" Oct 11 05:05:12 crc kubenswrapper[4651]: E1011 05:05:12.695618 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/infra-operator-controller-manager-8678f847b6-vpnkk" podUID="50a25e99-d2ec-4b16-a5fa-894e79ee528e" Oct 11 05:05:13 crc kubenswrapper[4651]: I1011 05:05:13.018442 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-vmqd8" event={"ID":"f4175bba-9aae-4faf-8670-f612f867827e","Type":"ContainerStarted","Data":"ba7568588c2d6f18ff035be1eb195943142e52787b9764953fc7ef31fb5c5afe"} Oct 11 05:05:13 crc kubenswrapper[4651]: I1011 05:05:13.018490 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-vmqd8" event={"ID":"f4175bba-9aae-4faf-8670-f612f867827e","Type":"ContainerStarted","Data":"19fd5e4d2254a0d0737a49e416bb7be7ab033fe6b2ae575c0fc9d35b7cfd0039"} Oct 11 05:05:13 crc kubenswrapper[4651]: E1011 05:05:13.022399 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:4b4a17fe08ce00e375afaaec6a28835f5c1784f03d11c4558376ac04130f3a9e\\\"\"" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-vmqd8" 
podUID="f4175bba-9aae-4faf-8670-f612f867827e" Oct 11 05:05:13 crc kubenswrapper[4651]: I1011 05:05:13.027395 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-xgxfd" event={"ID":"fcc9418a-3e9d-4c74-849d-b9884077820c","Type":"ContainerStarted","Data":"89a8018c94c87b8799fc688149d34b014e731f24c3f21cf1ba2596fc8c5e008f"} Oct 11 05:05:13 crc kubenswrapper[4651]: I1011 05:05:13.027429 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-xgxfd" event={"ID":"fcc9418a-3e9d-4c74-849d-b9884077820c","Type":"ContainerStarted","Data":"c06eac3ecbaeb4bf8112993ffe79e2de55c5126f55351ba33b396d68fd0bf1d9"} Oct 11 05:05:13 crc kubenswrapper[4651]: E1011 05:05:13.029704 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:33652e75a03a058769019fe8d8c51585a6eeefef5e1ecb96f9965434117954f2\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-xgxfd" podUID="fcc9418a-3e9d-4c74-849d-b9884077820c" Oct 11 05:05:13 crc kubenswrapper[4651]: I1011 05:05:13.054100 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-p9d4d" event={"ID":"0e634116-91fc-4fad-b906-a998e77ea3e4","Type":"ContainerStarted","Data":"172af3c9c4fff681b90a6c0ac33cdb0c3f0240a867cd5423c25a3acf47db646d"} Oct 11 05:05:13 crc kubenswrapper[4651]: I1011 05:05:13.054147 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-p9d4d" event={"ID":"0e634116-91fc-4fad-b906-a998e77ea3e4","Type":"ContainerStarted","Data":"443c3572a4c30ae8a859da87b55bd5fdb3439adeabbd9f4aefc071abce5c8f7d"} Oct 11 05:05:13 crc kubenswrapper[4651]: E1011 05:05:13.056537 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:7e584b1c430441c8b6591dadeff32e065de8a185ad37ef90d2e08d37e59aab4a\\\"\"" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-p9d4d" podUID="0e634116-91fc-4fad-b906-a998e77ea3e4" Oct 11 05:05:13 crc kubenswrapper[4651]: I1011 05:05:13.071316 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-776b77588d-7z9rb" event={"ID":"7bcc02c2-c9c4-498e-8f95-ace0d1b98899","Type":"ContainerStarted","Data":"9bef839e5bef496bdcbf7570ef386214ce70f9f665f4b68259391c12108bdfd5"} Oct 11 05:05:13 crc kubenswrapper[4651]: I1011 05:05:13.071356 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-776b77588d-7z9rb" event={"ID":"7bcc02c2-c9c4-498e-8f95-ace0d1b98899","Type":"ContainerStarted","Data":"61e84cf2074df44a6c738fbd4786474f78f3f39e13fa5a718cb8e0234839f09f"} Oct 11 05:05:13 crc kubenswrapper[4651]: I1011 05:05:13.071366 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-776b77588d-7z9rb" event={"ID":"7bcc02c2-c9c4-498e-8f95-ace0d1b98899","Type":"ContainerStarted","Data":"cdd3140ea66705faa190faa2c67ea64462ccbe0c759ca312fff03cb5cb1d3087"} Oct 11 05:05:13 crc kubenswrapper[4651]: I1011 05:05:13.072073 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openstack-operators/openstack-operator-controller-manager-776b77588d-7z9rb" Oct 11 05:05:13 crc kubenswrapper[4651]: I1011 05:05:13.073664 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-6m2qz" event={"ID":"c9dabf8d-2991-4af0-99c8-084e157e9b52","Type":"ContainerStarted","Data":"c97bc704f1eebfa166eaed7f55623f8021abec77ce594d20ace64e417574de21"} Oct 11 05:05:13 crc kubenswrapper[4651]: I1011 05:05:13.076646 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-dfqdz" event={"ID":"bd36db45-dfcf-4d27-8bfb-fcefeff7f0ba","Type":"ContainerStarted","Data":"0a137596eaa16f92a12279f168d4dcdc805d063643fb1679d76fc0b4178fd32e"} Oct 11 05:05:13 crc kubenswrapper[4651]: E1011 05:05:13.083172 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-dfqdz" podUID="bd36db45-dfcf-4d27-8bfb-fcefeff7f0ba" Oct 11 05:05:13 crc kubenswrapper[4651]: I1011 05:05:13.089418 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dxbxc6" event={"ID":"9105b503-cbba-48d1-acdb-ac21b7c791b4","Type":"ContainerStarted","Data":"dd0f137263f2dd3a6146744e5891e82a0f79db29000973a418fa89b27aa2e842"} Oct 11 05:05:13 crc kubenswrapper[4651]: I1011 05:05:13.089472 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dxbxc6" event={"ID":"9105b503-cbba-48d1-acdb-ac21b7c791b4","Type":"ContainerStarted","Data":"963ea8bd943618ee7b86d812282e99face59c53c6a918b029528b8aa14ec0bae"} Oct 11 05:05:13 crc kubenswrapper[4651]: E1011 05:05:13.091053 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:a17fc270857869fd1efe5020b2a1cb8c2abbd838f08de88f3a6a59e8754ec351\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dxbxc6" podUID="9105b503-cbba-48d1-acdb-ac21b7c791b4" Oct 11 05:05:13 crc kubenswrapper[4651]: I1011 05:05:13.095964 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-8678f847b6-vpnkk" event={"ID":"50a25e99-d2ec-4b16-a5fa-894e79ee528e","Type":"ContainerStarted","Data":"82ef59690f438bd776630534f31c19d67fcb1862a0e2a9373d0b6796d503d268"} Oct 11 05:05:13 crc kubenswrapper[4651]: I1011 05:05:13.096007 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-8678f847b6-vpnkk" event={"ID":"50a25e99-d2ec-4b16-a5fa-894e79ee528e","Type":"ContainerStarted","Data":"97b1221990780a856aca4df671fb7a119ee8e655db111b8f7f2d257c00989208"} Oct 11 05:05:13 crc kubenswrapper[4651]: E1011 05:05:13.102040 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.147:5001/openstack-k8s-operators/infra-operator:8ca4dd1cb823b5f481ee4cfa3c1c30f832a2ffb6\\\"\"" 
pod="openstack-operators/infra-operator-controller-manager-8678f847b6-vpnkk" podUID="50a25e99-d2ec-4b16-a5fa-894e79ee528e" Oct 11 05:05:13 crc kubenswrapper[4651]: I1011 05:05:13.108945 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-664664cb68-5s2zz" event={"ID":"160294e4-b990-41b3-8f6c-22102366d72c","Type":"ContainerStarted","Data":"cf3f880beeef9b91e1db535ce1e244371fe9e8ade7e61fbaebd82b5a4b3c238e"} Oct 11 05:05:13 crc kubenswrapper[4651]: I1011 05:05:13.116678 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-x9x8w" event={"ID":"555de60b-f68b-42a6-a662-d1e5202a30c5","Type":"ContainerStarted","Data":"c559da27ea85a9f4178e77bceb837354b3a272d150e7ce6b9cc9314ff27dea34"} Oct 11 05:05:13 crc kubenswrapper[4651]: I1011 05:05:13.119725 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-776b77588d-7z9rb" podStartSLOduration=3.119708575 podStartE2EDuration="3.119708575s" podCreationTimestamp="2025-10-11 05:05:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:05:13.114575805 +0000 UTC m=+834.010808601" watchObservedRunningTime="2025-10-11 05:05:13.119708575 +0000 UTC m=+834.015941371" Oct 11 05:05:13 crc kubenswrapper[4651]: I1011 05:05:13.133553 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-646675d848-7w8pk" event={"ID":"9bfc9d92-8b6d-4b13-9759-b7185e1f16bb","Type":"ContainerStarted","Data":"dfd085698fd0c24a7478cc317427a55b1caaef004ee058339047f0c624243f3c"} Oct 11 05:05:13 crc kubenswrapper[4651]: I1011 05:05:13.133623 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-646675d848-7w8pk" event={"ID":"9bfc9d92-8b6d-4b13-9759-b7185e1f16bb","Type":"ContainerStarted","Data":"021a9c583ec09aed9fe28035a3c5f7ef47bed2256806faf33a342c8b12818cee"} Oct 11 05:05:13 crc kubenswrapper[4651]: E1011 05:05:13.149615 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:98a5233f0596591acdf2c6a5838b08be108787cdb6ad1995b2b7886bac0fe6ca\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-646675d848-7w8pk" podUID="9bfc9d92-8b6d-4b13-9759-b7185e1f16bb" Oct 11 05:05:14 crc kubenswrapper[4651]: E1011 05:05:14.157845 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:4b4a17fe08ce00e375afaaec6a28835f5c1784f03d11c4558376ac04130f3a9e\\\"\"" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-vmqd8" podUID="f4175bba-9aae-4faf-8670-f612f867827e" Oct 11 05:05:14 crc kubenswrapper[4651]: E1011 05:05:14.157976 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.147:5001/openstack-k8s-operators/infra-operator:8ca4dd1cb823b5f481ee4cfa3c1c30f832a2ffb6\\\"\"" pod="openstack-operators/infra-operator-controller-manager-8678f847b6-vpnkk" podUID="50a25e99-d2ec-4b16-a5fa-894e79ee528e" Oct 11 05:05:14 crc 
kubenswrapper[4651]: E1011 05:05:14.158026 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-dfqdz" podUID="bd36db45-dfcf-4d27-8bfb-fcefeff7f0ba" Oct 11 05:05:14 crc kubenswrapper[4651]: E1011 05:05:14.158064 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:a17fc270857869fd1efe5020b2a1cb8c2abbd838f08de88f3a6a59e8754ec351\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dxbxc6" podUID="9105b503-cbba-48d1-acdb-ac21b7c791b4" Oct 11 05:05:14 crc kubenswrapper[4651]: E1011 05:05:14.158098 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:98a5233f0596591acdf2c6a5838b08be108787cdb6ad1995b2b7886bac0fe6ca\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-646675d848-7w8pk" podUID="9bfc9d92-8b6d-4b13-9759-b7185e1f16bb" Oct 11 05:05:14 crc kubenswrapper[4651]: E1011 05:05:14.158153 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:33652e75a03a058769019fe8d8c51585a6eeefef5e1ecb96f9965434117954f2\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-xgxfd" podUID="fcc9418a-3e9d-4c74-849d-b9884077820c" Oct 11 05:05:14 crc kubenswrapper[4651]: E1011 05:05:14.160908 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:7e584b1c430441c8b6591dadeff32e065de8a185ad37ef90d2e08d37e59aab4a\\\"\"" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-p9d4d" podUID="0e634116-91fc-4fad-b906-a998e77ea3e4" Oct 11 05:05:18 crc kubenswrapper[4651]: I1011 05:05:18.818041 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-bwlhb"] Oct 11 05:05:18 crc kubenswrapper[4651]: I1011 05:05:18.823396 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bwlhb" Oct 11 05:05:18 crc kubenswrapper[4651]: I1011 05:05:18.833539 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bwlhb"] Oct 11 05:05:18 crc kubenswrapper[4651]: I1011 05:05:18.869041 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15f267a4-a699-4088-8769-7922096fc774-utilities\") pod \"redhat-operators-bwlhb\" (UID: \"15f267a4-a699-4088-8769-7922096fc774\") " pod="openshift-marketplace/redhat-operators-bwlhb" Oct 11 05:05:18 crc kubenswrapper[4651]: I1011 05:05:18.869109 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wn6l8\" (UniqueName: \"kubernetes.io/projected/15f267a4-a699-4088-8769-7922096fc774-kube-api-access-wn6l8\") pod \"redhat-operators-bwlhb\" (UID: \"15f267a4-a699-4088-8769-7922096fc774\") " pod="openshift-marketplace/redhat-operators-bwlhb" Oct 11 05:05:18 crc kubenswrapper[4651]: I1011 05:05:18.869602 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15f267a4-a699-4088-8769-7922096fc774-catalog-content\") pod \"redhat-operators-bwlhb\" (UID: \"15f267a4-a699-4088-8769-7922096fc774\") " pod="openshift-marketplace/redhat-operators-bwlhb" Oct 11 05:05:18 crc kubenswrapper[4651]: I1011 05:05:18.972296 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15f267a4-a699-4088-8769-7922096fc774-utilities\") pod \"redhat-operators-bwlhb\" (UID: \"15f267a4-a699-4088-8769-7922096fc774\") " pod="openshift-marketplace/redhat-operators-bwlhb" Oct 11 05:05:18 crc kubenswrapper[4651]: I1011 05:05:18.972345 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wn6l8\" (UniqueName: \"kubernetes.io/projected/15f267a4-a699-4088-8769-7922096fc774-kube-api-access-wn6l8\") pod \"redhat-operators-bwlhb\" (UID: \"15f267a4-a699-4088-8769-7922096fc774\") " pod="openshift-marketplace/redhat-operators-bwlhb" Oct 11 05:05:18 crc kubenswrapper[4651]: I1011 05:05:18.972417 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15f267a4-a699-4088-8769-7922096fc774-catalog-content\") pod \"redhat-operators-bwlhb\" (UID: \"15f267a4-a699-4088-8769-7922096fc774\") " pod="openshift-marketplace/redhat-operators-bwlhb" Oct 11 05:05:18 crc kubenswrapper[4651]: I1011 05:05:18.972993 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15f267a4-a699-4088-8769-7922096fc774-utilities\") pod \"redhat-operators-bwlhb\" (UID: \"15f267a4-a699-4088-8769-7922096fc774\") " pod="openshift-marketplace/redhat-operators-bwlhb" Oct 11 05:05:18 crc kubenswrapper[4651]: I1011 05:05:18.973155 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15f267a4-a699-4088-8769-7922096fc774-catalog-content\") pod \"redhat-operators-bwlhb\" (UID: \"15f267a4-a699-4088-8769-7922096fc774\") " pod="openshift-marketplace/redhat-operators-bwlhb" Oct 11 05:05:19 crc kubenswrapper[4651]: I1011 05:05:18.993994 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-wn6l8\" (UniqueName: \"kubernetes.io/projected/15f267a4-a699-4088-8769-7922096fc774-kube-api-access-wn6l8\") pod \"redhat-operators-bwlhb\" (UID: \"15f267a4-a699-4088-8769-7922096fc774\") " pod="openshift-marketplace/redhat-operators-bwlhb" Oct 11 05:05:19 crc kubenswrapper[4651]: I1011 05:05:19.146875 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bwlhb" Oct 11 05:05:21 crc kubenswrapper[4651]: I1011 05:05:21.205162 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vgwd5"] Oct 11 05:05:21 crc kubenswrapper[4651]: I1011 05:05:21.207562 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vgwd5" Oct 11 05:05:21 crc kubenswrapper[4651]: I1011 05:05:21.215423 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83ff9f8d-04fd-4fa5-894e-4e63202fae9c-catalog-content\") pod \"community-operators-vgwd5\" (UID: \"83ff9f8d-04fd-4fa5-894e-4e63202fae9c\") " pod="openshift-marketplace/community-operators-vgwd5" Oct 11 05:05:21 crc kubenswrapper[4651]: I1011 05:05:21.215880 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8pxpw\" (UniqueName: \"kubernetes.io/projected/83ff9f8d-04fd-4fa5-894e-4e63202fae9c-kube-api-access-8pxpw\") pod \"community-operators-vgwd5\" (UID: \"83ff9f8d-04fd-4fa5-894e-4e63202fae9c\") " pod="openshift-marketplace/community-operators-vgwd5" Oct 11 05:05:21 crc kubenswrapper[4651]: I1011 05:05:21.216052 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83ff9f8d-04fd-4fa5-894e-4e63202fae9c-utilities\") pod \"community-operators-vgwd5\" (UID: \"83ff9f8d-04fd-4fa5-894e-4e63202fae9c\") " pod="openshift-marketplace/community-operators-vgwd5" Oct 11 05:05:21 crc kubenswrapper[4651]: I1011 05:05:21.217679 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vgwd5"] Oct 11 05:05:21 crc kubenswrapper[4651]: I1011 05:05:21.317354 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83ff9f8d-04fd-4fa5-894e-4e63202fae9c-catalog-content\") pod \"community-operators-vgwd5\" (UID: \"83ff9f8d-04fd-4fa5-894e-4e63202fae9c\") " pod="openshift-marketplace/community-operators-vgwd5" Oct 11 05:05:21 crc kubenswrapper[4651]: I1011 05:05:21.317843 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8pxpw\" (UniqueName: \"kubernetes.io/projected/83ff9f8d-04fd-4fa5-894e-4e63202fae9c-kube-api-access-8pxpw\") pod \"community-operators-vgwd5\" (UID: \"83ff9f8d-04fd-4fa5-894e-4e63202fae9c\") " pod="openshift-marketplace/community-operators-vgwd5" Oct 11 05:05:21 crc kubenswrapper[4651]: I1011 05:05:21.317890 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83ff9f8d-04fd-4fa5-894e-4e63202fae9c-utilities\") pod \"community-operators-vgwd5\" (UID: \"83ff9f8d-04fd-4fa5-894e-4e63202fae9c\") " pod="openshift-marketplace/community-operators-vgwd5" Oct 11 05:05:21 crc kubenswrapper[4651]: I1011 05:05:21.317888 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83ff9f8d-04fd-4fa5-894e-4e63202fae9c-catalog-content\") pod \"community-operators-vgwd5\" (UID: \"83ff9f8d-04fd-4fa5-894e-4e63202fae9c\") " pod="openshift-marketplace/community-operators-vgwd5" Oct 11 05:05:21 crc kubenswrapper[4651]: I1011 05:05:21.318197 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83ff9f8d-04fd-4fa5-894e-4e63202fae9c-utilities\") pod \"community-operators-vgwd5\" (UID: \"83ff9f8d-04fd-4fa5-894e-4e63202fae9c\") " pod="openshift-marketplace/community-operators-vgwd5" Oct 11 05:05:21 crc kubenswrapper[4651]: I1011 05:05:21.348404 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8pxpw\" (UniqueName: \"kubernetes.io/projected/83ff9f8d-04fd-4fa5-894e-4e63202fae9c-kube-api-access-8pxpw\") pod \"community-operators-vgwd5\" (UID: \"83ff9f8d-04fd-4fa5-894e-4e63202fae9c\") " pod="openshift-marketplace/community-operators-vgwd5" Oct 11 05:05:21 crc kubenswrapper[4651]: I1011 05:05:21.541171 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vgwd5" Oct 11 05:05:21 crc kubenswrapper[4651]: I1011 05:05:21.860280 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-776b77588d-7z9rb" Oct 11 05:05:22 crc kubenswrapper[4651]: I1011 05:05:22.062092 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bwlhb"] Oct 11 05:05:22 crc kubenswrapper[4651]: W1011 05:05:22.118878 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod15f267a4_a699_4088_8769_7922096fc774.slice/crio-785328cc5f490b88d9987c29a05c75972ac3e28ac69dd6e04aeed276c09bdf8a WatchSource:0}: Error finding container 785328cc5f490b88d9987c29a05c75972ac3e28ac69dd6e04aeed276c09bdf8a: Status 404 returned error can't find the container with id 785328cc5f490b88d9987c29a05c75972ac3e28ac69dd6e04aeed276c09bdf8a Oct 11 05:05:22 crc kubenswrapper[4651]: I1011 05:05:22.183924 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vgwd5"] Oct 11 05:05:22 crc kubenswrapper[4651]: I1011 05:05:22.224102 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-g8m5f" event={"ID":"75692ce0-1ecb-4db6-a831-5740382b17e2","Type":"ContainerStarted","Data":"450aa15b78888acc9b5b0c87057cb162679c434c608b582d88eeabd6f1799654"} Oct 11 05:05:22 crc kubenswrapper[4651]: I1011 05:05:22.228847 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-t88s8" event={"ID":"99b28924-6f7a-4232-8fd5-b245178ce2ea","Type":"ContainerStarted","Data":"f51f0827d9b502046d83ae085abf5b9b67cf19c5c7a968c5c6c8d2c9bd59e202"} Oct 11 05:05:22 crc kubenswrapper[4651]: I1011 05:05:22.230689 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bwlhb" event={"ID":"15f267a4-a699-4088-8769-7922096fc774","Type":"ContainerStarted","Data":"785328cc5f490b88d9987c29a05c75972ac3e28ac69dd6e04aeed276c09bdf8a"} Oct 11 05:05:22 crc kubenswrapper[4651]: I1011 05:05:22.231620 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vgwd5" 
event={"ID":"83ff9f8d-04fd-4fa5-894e-4e63202fae9c","Type":"ContainerStarted","Data":"963faab473e51d92eedf8a8af3d2d9e5150bdedea2aa8a90b8e9daac52eec6ca"} Oct 11 05:05:22 crc kubenswrapper[4651]: I1011 05:05:22.234685 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-27r6g" event={"ID":"4aede6f1-d9d0-4c62-b118-7c93fa2af789","Type":"ContainerStarted","Data":"8d5f11de3e3b3923b76d8c8311ad6327ea082d777474750fa5da3177958feb81"} Oct 11 05:05:22 crc kubenswrapper[4651]: I1011 05:05:22.250927 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-7mhch" event={"ID":"321665c4-bc9a-47e0-a6c4-a54d56ad5ce8","Type":"ContainerStarted","Data":"a654dbe781953eb357e7e9a59fd503f0eb34b6c5d91f6cea40d8bd9801093489"} Oct 11 05:05:22 crc kubenswrapper[4651]: I1011 05:05:22.273676 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-zdcb9" event={"ID":"519c3c0c-07ee-4f48-ba92-d202190d9a49","Type":"ContainerStarted","Data":"03b0f17815d0ba6a69786dba8272890e531ba32c2284f753099ac5ef05ea1e68"} Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.297610 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-fwr7j" event={"ID":"205c5753-c94e-4bf4-993f-36b798bb489d","Type":"ContainerStarted","Data":"fa5b8d4ee8914f2573daf6c261bcd4b0277213a608efb1eb9e096ef4bfa5c239"} Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.297945 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-fwr7j" event={"ID":"205c5753-c94e-4bf4-993f-36b798bb489d","Type":"ContainerStarted","Data":"59bea8579c1b7e1e602965d8b7abb3f50547ee9a1882e8202740a5bb052a70d4"} Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.298702 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-fwr7j" Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.331341 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-x9x8w" event={"ID":"555de60b-f68b-42a6-a662-d1e5202a30c5","Type":"ContainerStarted","Data":"e8a17dc0f7d5416501dc6bbd1828e6e9c4ec2f5154ec0b8445f0ef57f4013dcb"} Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.335198 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-fwr7j" podStartSLOduration=3.194667717 podStartE2EDuration="13.335186495s" podCreationTimestamp="2025-10-11 05:05:10 +0000 UTC" firstStartedPulling="2025-10-11 05:05:11.504714843 +0000 UTC m=+832.400947639" lastFinishedPulling="2025-10-11 05:05:21.645233621 +0000 UTC m=+842.541466417" observedRunningTime="2025-10-11 05:05:23.335088482 +0000 UTC m=+844.231321298" watchObservedRunningTime="2025-10-11 05:05:23.335186495 +0000 UTC m=+844.231419281" Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.373317 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-prncj" event={"ID":"d725cd21-efdc-4182-be84-460db3042d11","Type":"ContainerStarted","Data":"b54e9fb0b851fcb2b8d8fcb4f25ca22b64d7164174037e6dadf0be9c8fa7811d"} Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.373383 4651 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-prncj" event={"ID":"d725cd21-efdc-4182-be84-460db3042d11","Type":"ContainerStarted","Data":"e44244696e59343aab83495b7f0e17199d9c62064b17e9745f270cc3ce5a6c08"} Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.373670 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-prncj" Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.379181 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-989mj" event={"ID":"b6b16d99-7f05-464a-a338-dcded4fa42fa","Type":"ContainerStarted","Data":"07ceb62fe2f222d9ebd75882efb7d492e326ee8a9df436aa961e5cb751aa46fe"} Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.379228 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-989mj" event={"ID":"b6b16d99-7f05-464a-a338-dcded4fa42fa","Type":"ContainerStarted","Data":"6113db242ccbcfcb28bcc10c5e19afe68b886b033a5107c0bd812ba99d8148cc"} Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.379891 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-989mj" Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.388690 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-59578bc799-9d9sm" event={"ID":"5901a38b-902a-4822-8483-9d478e61aa40","Type":"ContainerStarted","Data":"a9362c27263d3916689ef23515a98d1362b08549e9aeb67a867deb0a3be17445"} Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.388726 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-59578bc799-9d9sm" event={"ID":"5901a38b-902a-4822-8483-9d478e61aa40","Type":"ContainerStarted","Data":"27ddcb4065d34ce7459094a2ae7ba83c0010dd95519e32be840b63bfd52abdfa"} Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.389670 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-59578bc799-9d9sm" Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.408842 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-7mhch" event={"ID":"321665c4-bc9a-47e0-a6c4-a54d56ad5ce8","Type":"ContainerStarted","Data":"1b04bec7d17cf66b78ee7c0a50fddb8112cfb01f5c61a49b38122aab8f0701df"} Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.408884 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-7mhch" Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.427980 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-t88s8" event={"ID":"99b28924-6f7a-4232-8fd5-b245178ce2ea","Type":"ContainerStarted","Data":"7f331d2790887f5786a1a902068a8b74b009d4595a2280a2b74d3383924957cb"} Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.428608 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-t88s8" Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.441648 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-prncj" podStartSLOduration=3.121683212 podStartE2EDuration="13.441631904s" podCreationTimestamp="2025-10-11 05:05:10 +0000 UTC" firstStartedPulling="2025-10-11 05:05:11.34854035 +0000 UTC m=+832.244773146" lastFinishedPulling="2025-10-11 05:05:21.668489042 +0000 UTC m=+842.564721838" observedRunningTime="2025-10-11 05:05:23.438615327 +0000 UTC m=+844.334848133" watchObservedRunningTime="2025-10-11 05:05:23.441631904 +0000 UTC m=+844.337864700" Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.454246 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-27r6g" event={"ID":"4aede6f1-d9d0-4c62-b118-7c93fa2af789","Type":"ContainerStarted","Data":"e170b364878831e1a4a56916fb77d05810e11cd77c727466a3e73c968350735d"} Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.454737 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-27r6g" Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.483456 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-664664cb68-5s2zz" event={"ID":"160294e4-b990-41b3-8f6c-22102366d72c","Type":"ContainerStarted","Data":"9c6d3a053fb834e97c2f5612e15067c8def84adfef0e5eeb5474d4b1beeb8ee9"} Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.483519 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-664664cb68-5s2zz" event={"ID":"160294e4-b990-41b3-8f6c-22102366d72c","Type":"ContainerStarted","Data":"54f5e19af71dd40eebd90503dad762512e20d84f5ea3e240c39425bfcb30414b"} Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.484307 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-664664cb68-5s2zz" Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.489456 4651 generic.go:334] "Generic (PLEG): container finished" podID="15f267a4-a699-4088-8769-7922096fc774" containerID="64927d916fefacf0e307766aa7691b95292dc09219a2ccf75df41d51dde65d09" exitCode=0 Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.489514 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bwlhb" event={"ID":"15f267a4-a699-4088-8769-7922096fc774","Type":"ContainerDied","Data":"64927d916fefacf0e307766aa7691b95292dc09219a2ccf75df41d51dde65d09"} Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.499079 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-g8m5f" event={"ID":"75692ce0-1ecb-4db6-a831-5740382b17e2","Type":"ContainerStarted","Data":"b64342f89be564ce295ff6d3186e6a7dcf2bed5f6cc08de671dbec19e2ad2618"} Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.499832 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-g8m5f" Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.516581 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-c9bzn" event={"ID":"dd5122d9-c098-49cf-9723-bc0c31c6ce3b","Type":"ContainerStarted","Data":"e793d021072631d5debd1141837b7ed749b15d017387dc33cb6c7772901fc638"} Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.517260 4651 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-c9bzn" Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.533960 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-xtlln" event={"ID":"b5b061bd-7d85-4960-a956-95c7911591a2","Type":"ContainerStarted","Data":"1d2dd80e9265e8dcff93119e1c62c4fa9ecf2368b6a807b041481a462b00423f"} Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.540934 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-59578bc799-9d9sm" podStartSLOduration=3.657757013 podStartE2EDuration="13.54092383s" podCreationTimestamp="2025-10-11 05:05:10 +0000 UTC" firstStartedPulling="2025-10-11 05:05:11.691838652 +0000 UTC m=+832.588071448" lastFinishedPulling="2025-10-11 05:05:21.575005479 +0000 UTC m=+842.471238265" observedRunningTime="2025-10-11 05:05:23.538205501 +0000 UTC m=+844.434438307" watchObservedRunningTime="2025-10-11 05:05:23.54092383 +0000 UTC m=+844.437156626" Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.542511 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-7mhch" podStartSLOduration=3.650981052 podStartE2EDuration="13.54250649s" podCreationTimestamp="2025-10-11 05:05:10 +0000 UTC" firstStartedPulling="2025-10-11 05:05:11.683573023 +0000 UTC m=+832.579805819" lastFinishedPulling="2025-10-11 05:05:21.575098461 +0000 UTC m=+842.471331257" observedRunningTime="2025-10-11 05:05:23.499144007 +0000 UTC m=+844.395376813" watchObservedRunningTime="2025-10-11 05:05:23.54250649 +0000 UTC m=+844.438739286" Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.552984 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-gbn6q" event={"ID":"cca7099f-c5ef-4109-91f5-b6831d0771e8","Type":"ContainerStarted","Data":"96cf0a4d655d5e2122e4949ecc1a180405e56433a259b3100f111430df4e0ed9"} Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.553028 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-gbn6q" event={"ID":"cca7099f-c5ef-4109-91f5-b6831d0771e8","Type":"ContainerStarted","Data":"81d9be38aad299057bfc8a25fe12770831b6a5e45a583f148a017e08f2b06275"} Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.553647 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-gbn6q" Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.559622 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-zdcb9" event={"ID":"519c3c0c-07ee-4f48-ba92-d202190d9a49","Type":"ContainerStarted","Data":"ab26400497cb63c3835390cbf10c63d614747899df50ed028413f5e719b17460"} Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.560088 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-zdcb9" Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.560575 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-6m2qz" 
event={"ID":"c9dabf8d-2991-4af0-99c8-084e157e9b52","Type":"ContainerStarted","Data":"1e2967811ee006020b0f1e021c25388093043c7dead5bc0034652f3113ec2371"} Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.582047 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-t88s8" podStartSLOduration=4.065931585 podStartE2EDuration="14.582033986s" podCreationTimestamp="2025-10-11 05:05:09 +0000 UTC" firstStartedPulling="2025-10-11 05:05:11.083975104 +0000 UTC m=+831.980207900" lastFinishedPulling="2025-10-11 05:05:21.600077505 +0000 UTC m=+842.496310301" observedRunningTime="2025-10-11 05:05:23.580184219 +0000 UTC m=+844.476417025" watchObservedRunningTime="2025-10-11 05:05:23.582033986 +0000 UTC m=+844.478266772" Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.617770 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-989mj" podStartSLOduration=3.521971962 podStartE2EDuration="13.617751085s" podCreationTimestamp="2025-10-11 05:05:10 +0000 UTC" firstStartedPulling="2025-10-11 05:05:11.503698377 +0000 UTC m=+832.399931173" lastFinishedPulling="2025-10-11 05:05:21.5994775 +0000 UTC m=+842.495710296" observedRunningTime="2025-10-11 05:05:23.602939268 +0000 UTC m=+844.499172074" watchObservedRunningTime="2025-10-11 05:05:23.617751085 +0000 UTC m=+844.513983881" Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.688756 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-27r6g" podStartSLOduration=3.992676685 podStartE2EDuration="13.688741082s" podCreationTimestamp="2025-10-11 05:05:10 +0000 UTC" firstStartedPulling="2025-10-11 05:05:11.905462475 +0000 UTC m=+832.801695271" lastFinishedPulling="2025-10-11 05:05:21.601526872 +0000 UTC m=+842.497759668" observedRunningTime="2025-10-11 05:05:23.650190431 +0000 UTC m=+844.546423227" watchObservedRunningTime="2025-10-11 05:05:23.688741082 +0000 UTC m=+844.584973878" Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.783854 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-c9bzn" podStartSLOduration=3.592654467 podStartE2EDuration="13.783834662s" podCreationTimestamp="2025-10-11 05:05:10 +0000 UTC" firstStartedPulling="2025-10-11 05:05:11.509655648 +0000 UTC m=+832.405888444" lastFinishedPulling="2025-10-11 05:05:21.700835843 +0000 UTC m=+842.597068639" observedRunningTime="2025-10-11 05:05:23.719979567 +0000 UTC m=+844.616212373" watchObservedRunningTime="2025-10-11 05:05:23.783834662 +0000 UTC m=+844.680067448" Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.784091 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-zdcb9" podStartSLOduration=3.871409343 podStartE2EDuration="13.784087398s" podCreationTimestamp="2025-10-11 05:05:10 +0000 UTC" firstStartedPulling="2025-10-11 05:05:11.688306663 +0000 UTC m=+832.584539459" lastFinishedPulling="2025-10-11 05:05:21.600984718 +0000 UTC m=+842.497217514" observedRunningTime="2025-10-11 05:05:23.750135064 +0000 UTC m=+844.646367880" watchObservedRunningTime="2025-10-11 05:05:23.784087398 +0000 UTC m=+844.680320194" Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.784608 4651 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-g8m5f" podStartSLOduration=4.421299839 podStartE2EDuration="14.784604941s" podCreationTimestamp="2025-10-11 05:05:09 +0000 UTC" firstStartedPulling="2025-10-11 05:05:11.237604194 +0000 UTC m=+832.133836990" lastFinishedPulling="2025-10-11 05:05:21.600909296 +0000 UTC m=+842.497142092" observedRunningTime="2025-10-11 05:05:23.780206519 +0000 UTC m=+844.676439315" watchObservedRunningTime="2025-10-11 05:05:23.784604941 +0000 UTC m=+844.680837737" Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.855649 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-gbn6q" podStartSLOduration=4.24977036 podStartE2EDuration="14.855638199s" podCreationTimestamp="2025-10-11 05:05:09 +0000 UTC" firstStartedPulling="2025-10-11 05:05:11.063622808 +0000 UTC m=+831.959855604" lastFinishedPulling="2025-10-11 05:05:21.669490647 +0000 UTC m=+842.565723443" observedRunningTime="2025-10-11 05:05:23.853101464 +0000 UTC m=+844.749334270" watchObservedRunningTime="2025-10-11 05:05:23.855638199 +0000 UTC m=+844.751870995" Oct 11 05:05:23 crc kubenswrapper[4651]: I1011 05:05:23.880192 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-664664cb68-5s2zz" podStartSLOduration=4.419673275 podStartE2EDuration="13.880173963s" podCreationTimestamp="2025-10-11 05:05:10 +0000 UTC" firstStartedPulling="2025-10-11 05:05:12.140404398 +0000 UTC m=+833.036637194" lastFinishedPulling="2025-10-11 05:05:21.600905086 +0000 UTC m=+842.497137882" observedRunningTime="2025-10-11 05:05:23.880024699 +0000 UTC m=+844.776257515" watchObservedRunningTime="2025-10-11 05:05:23.880173963 +0000 UTC m=+844.776406759" Oct 11 05:05:24 crc kubenswrapper[4651]: I1011 05:05:24.570385 4651 generic.go:334] "Generic (PLEG): container finished" podID="83ff9f8d-04fd-4fa5-894e-4e63202fae9c" containerID="ee38af39da309ac26a2866498f1a82f3998b8dfdab99753aa431db5c7baa1243" exitCode=0 Oct 11 05:05:24 crc kubenswrapper[4651]: I1011 05:05:24.570677 4651 generic.go:334] "Generic (PLEG): container finished" podID="83ff9f8d-04fd-4fa5-894e-4e63202fae9c" containerID="4e77a31da7d465fa4ae8c5b487535266bfc866bdf12a0225b4002809e79b66de" exitCode=0 Oct 11 05:05:24 crc kubenswrapper[4651]: I1011 05:05:24.570465 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vgwd5" event={"ID":"83ff9f8d-04fd-4fa5-894e-4e63202fae9c","Type":"ContainerDied","Data":"ee38af39da309ac26a2866498f1a82f3998b8dfdab99753aa431db5c7baa1243"} Oct 11 05:05:24 crc kubenswrapper[4651]: I1011 05:05:24.570782 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vgwd5" event={"ID":"83ff9f8d-04fd-4fa5-894e-4e63202fae9c","Type":"ContainerDied","Data":"4e77a31da7d465fa4ae8c5b487535266bfc866bdf12a0225b4002809e79b66de"} Oct 11 05:05:24 crc kubenswrapper[4651]: I1011 05:05:24.572904 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-c9bzn" event={"ID":"dd5122d9-c098-49cf-9723-bc0c31c6ce3b","Type":"ContainerStarted","Data":"ecdbc7a72d95f4a8f20b3f47f63ca994f2db75d9b2a82d1dc590c7ebf8d091f9"} Oct 11 05:05:24 crc kubenswrapper[4651]: I1011 05:05:24.574795 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-xtlln" event={"ID":"b5b061bd-7d85-4960-a956-95c7911591a2","Type":"ContainerStarted","Data":"261eef16abe5b9b6f75ec7fe2fea5c5d40595693b9a721645117fc4016c1ba5b"} Oct 11 05:05:24 crc kubenswrapper[4651]: I1011 05:05:24.579187 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-6m2qz" event={"ID":"c9dabf8d-2991-4af0-99c8-084e157e9b52","Type":"ContainerStarted","Data":"cb7a21c69a9de4ab939a7702df8fd9ac3297292ba692369fb3b06b7482d22059"} Oct 11 05:05:24 crc kubenswrapper[4651]: I1011 05:05:24.579349 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-6m2qz" Oct 11 05:05:24 crc kubenswrapper[4651]: I1011 05:05:24.582240 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bwlhb" event={"ID":"15f267a4-a699-4088-8769-7922096fc774","Type":"ContainerStarted","Data":"77907cc61148ca05f85ef3c0da7f0df1543a2c243db3443af791039b112ad3f5"} Oct 11 05:05:24 crc kubenswrapper[4651]: I1011 05:05:24.583701 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-x9x8w" event={"ID":"555de60b-f68b-42a6-a662-d1e5202a30c5","Type":"ContainerStarted","Data":"5b0de3dc493ebe95817c35cc1ab76cf35c073c1155a0d6680f6ae7ebf69a599d"} Oct 11 05:05:24 crc kubenswrapper[4651]: I1011 05:05:24.616247 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-xtlln" podStartSLOduration=4.974935675 podStartE2EDuration="15.616227393s" podCreationTimestamp="2025-10-11 05:05:09 +0000 UTC" firstStartedPulling="2025-10-11 05:05:11.028592899 +0000 UTC m=+831.924825695" lastFinishedPulling="2025-10-11 05:05:21.669884617 +0000 UTC m=+842.566117413" observedRunningTime="2025-10-11 05:05:24.611852591 +0000 UTC m=+845.508085407" watchObservedRunningTime="2025-10-11 05:05:24.616227393 +0000 UTC m=+845.512460189" Oct 11 05:05:24 crc kubenswrapper[4651]: I1011 05:05:24.658564 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-x9x8w" podStartSLOduration=5.199760745 podStartE2EDuration="14.65854962s" podCreationTimestamp="2025-10-11 05:05:10 +0000 UTC" firstStartedPulling="2025-10-11 05:05:12.141212708 +0000 UTC m=+833.037445504" lastFinishedPulling="2025-10-11 05:05:21.600001583 +0000 UTC m=+842.496234379" observedRunningTime="2025-10-11 05:05:24.656481987 +0000 UTC m=+845.552714803" watchObservedRunningTime="2025-10-11 05:05:24.65854962 +0000 UTC m=+845.554782416" Oct 11 05:05:24 crc kubenswrapper[4651]: I1011 05:05:24.674708 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-6m2qz" podStartSLOduration=5.202044205 podStartE2EDuration="14.674692491s" podCreationTimestamp="2025-10-11 05:05:10 +0000 UTC" firstStartedPulling="2025-10-11 05:05:12.140784418 +0000 UTC m=+833.037017214" lastFinishedPulling="2025-10-11 05:05:21.613432704 +0000 UTC m=+842.509665500" observedRunningTime="2025-10-11 05:05:24.670640138 +0000 UTC m=+845.566872944" watchObservedRunningTime="2025-10-11 05:05:24.674692491 +0000 UTC m=+845.570925287" Oct 11 05:05:25 crc kubenswrapper[4651]: I1011 05:05:25.592130 4651 generic.go:334] "Generic (PLEG): container finished" 
podID="15f267a4-a699-4088-8769-7922096fc774" containerID="77907cc61148ca05f85ef3c0da7f0df1543a2c243db3443af791039b112ad3f5" exitCode=0 Oct 11 05:05:25 crc kubenswrapper[4651]: I1011 05:05:25.592203 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bwlhb" event={"ID":"15f267a4-a699-4088-8769-7922096fc774","Type":"ContainerDied","Data":"77907cc61148ca05f85ef3c0da7f0df1543a2c243db3443af791039b112ad3f5"} Oct 11 05:05:25 crc kubenswrapper[4651]: I1011 05:05:25.594896 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vgwd5" event={"ID":"83ff9f8d-04fd-4fa5-894e-4e63202fae9c","Type":"ContainerStarted","Data":"89969c18e35c60773727dbd85dfcf4c4e00a02f86be656e6e8e8427829cd0c09"} Oct 11 05:05:25 crc kubenswrapper[4651]: I1011 05:05:25.596120 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-x9x8w" Oct 11 05:05:25 crc kubenswrapper[4651]: I1011 05:05:25.596161 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-xtlln" Oct 11 05:05:25 crc kubenswrapper[4651]: I1011 05:05:25.633775 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vgwd5" podStartSLOduration=3.125148247 podStartE2EDuration="4.633750086s" podCreationTimestamp="2025-10-11 05:05:21 +0000 UTC" firstStartedPulling="2025-10-11 05:05:23.472426827 +0000 UTC m=+844.368659623" lastFinishedPulling="2025-10-11 05:05:24.981028666 +0000 UTC m=+845.877261462" observedRunningTime="2025-10-11 05:05:25.630363609 +0000 UTC m=+846.526596415" watchObservedRunningTime="2025-10-11 05:05:25.633750086 +0000 UTC m=+846.529982912" Oct 11 05:05:26 crc kubenswrapper[4651]: I1011 05:05:26.605937 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bwlhb" event={"ID":"15f267a4-a699-4088-8769-7922096fc774","Type":"ContainerStarted","Data":"1cf04bdd59ca2ee81f2751b6f6d3c3f4601a913c68e86149e0aa698f7e368fc5"} Oct 11 05:05:26 crc kubenswrapper[4651]: I1011 05:05:26.670556 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-bwlhb" podStartSLOduration=5.903187729 podStartE2EDuration="8.670532337s" podCreationTimestamp="2025-10-11 05:05:18 +0000 UTC" firstStartedPulling="2025-10-11 05:05:23.490928188 +0000 UTC m=+844.387160984" lastFinishedPulling="2025-10-11 05:05:26.258272796 +0000 UTC m=+847.154505592" observedRunningTime="2025-10-11 05:05:26.66752756 +0000 UTC m=+847.563760376" watchObservedRunningTime="2025-10-11 05:05:26.670532337 +0000 UTC m=+847.566765173" Oct 11 05:05:29 crc kubenswrapper[4651]: I1011 05:05:29.147877 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-bwlhb" Oct 11 05:05:29 crc kubenswrapper[4651]: I1011 05:05:29.148165 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-bwlhb" Oct 11 05:05:29 crc kubenswrapper[4651]: I1011 05:05:29.626598 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-xgxfd" event={"ID":"fcc9418a-3e9d-4c74-849d-b9884077820c","Type":"ContainerStarted","Data":"40879cfffa6c3764a6560a354194e47575b2a4e210d163a1c7ab3e7a1b542a49"} Oct 11 05:05:29 crc kubenswrapper[4651]: I1011 
05:05:29.627064 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-xgxfd" Oct 11 05:05:29 crc kubenswrapper[4651]: I1011 05:05:29.631411 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dxbxc6" event={"ID":"9105b503-cbba-48d1-acdb-ac21b7c791b4","Type":"ContainerStarted","Data":"d60ad528905625ee40fdc6847c56cad0fdaa90dee6743ed63bc00614f94ff7f7"} Oct 11 05:05:29 crc kubenswrapper[4651]: I1011 05:05:29.631642 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dxbxc6" Oct 11 05:05:29 crc kubenswrapper[4651]: I1011 05:05:29.650633 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-xgxfd" podStartSLOduration=3.1464200350000002 podStartE2EDuration="19.650608998s" podCreationTimestamp="2025-10-11 05:05:10 +0000 UTC" firstStartedPulling="2025-10-11 05:05:12.167595768 +0000 UTC m=+833.063828564" lastFinishedPulling="2025-10-11 05:05:28.671784741 +0000 UTC m=+849.568017527" observedRunningTime="2025-10-11 05:05:29.642405329 +0000 UTC m=+850.538638135" watchObservedRunningTime="2025-10-11 05:05:29.650608998 +0000 UTC m=+850.546841814" Oct 11 05:05:29 crc kubenswrapper[4651]: I1011 05:05:29.669110 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dxbxc6" podStartSLOduration=3.180580875 podStartE2EDuration="19.669090659s" podCreationTimestamp="2025-10-11 05:05:10 +0000 UTC" firstStartedPulling="2025-10-11 05:05:12.162025207 +0000 UTC m=+833.058258003" lastFinishedPulling="2025-10-11 05:05:28.650534991 +0000 UTC m=+849.546767787" observedRunningTime="2025-10-11 05:05:29.66522113 +0000 UTC m=+850.561453926" watchObservedRunningTime="2025-10-11 05:05:29.669090659 +0000 UTC m=+850.565323455" Oct 11 05:05:30 crc kubenswrapper[4651]: I1011 05:05:30.191407 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-bwlhb" podUID="15f267a4-a699-4088-8769-7922096fc774" containerName="registry-server" probeResult="failure" output=< Oct 11 05:05:30 crc kubenswrapper[4651]: timeout: failed to connect service ":50051" within 1s Oct 11 05:05:30 crc kubenswrapper[4651]: > Oct 11 05:05:30 crc kubenswrapper[4651]: I1011 05:05:30.272289 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-gbn6q" Oct 11 05:05:30 crc kubenswrapper[4651]: I1011 05:05:30.292864 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-59cdc64769-xtlln" Oct 11 05:05:30 crc kubenswrapper[4651]: I1011 05:05:30.327499 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-g8m5f" Oct 11 05:05:30 crc kubenswrapper[4651]: I1011 05:05:30.341138 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-t88s8" Oct 11 05:05:30 crc kubenswrapper[4651]: I1011 05:05:30.415946 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-prncj" Oct 11 05:05:30 crc kubenswrapper[4651]: I1011 05:05:30.441097 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-c9bzn" Oct 11 05:05:30 crc kubenswrapper[4651]: I1011 05:05:30.463731 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-fwr7j" Oct 11 05:05:30 crc kubenswrapper[4651]: I1011 05:05:30.555756 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-989mj" Oct 11 05:05:30 crc kubenswrapper[4651]: I1011 05:05:30.663894 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-59578bc799-9d9sm" Oct 11 05:05:30 crc kubenswrapper[4651]: I1011 05:05:30.686493 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-zdcb9" Oct 11 05:05:30 crc kubenswrapper[4651]: I1011 05:05:30.725177 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-7mhch" Oct 11 05:05:30 crc kubenswrapper[4651]: I1011 05:05:30.795439 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-6m2qz" Oct 11 05:05:31 crc kubenswrapper[4651]: I1011 05:05:31.015642 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-x9x8w" Oct 11 05:05:31 crc kubenswrapper[4651]: I1011 05:05:31.073327 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-27r6g" Oct 11 05:05:31 crc kubenswrapper[4651]: I1011 05:05:31.148434 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-664664cb68-5s2zz" Oct 11 05:05:31 crc kubenswrapper[4651]: I1011 05:05:31.542112 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vgwd5" Oct 11 05:05:31 crc kubenswrapper[4651]: I1011 05:05:31.542526 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vgwd5" Oct 11 05:05:31 crc kubenswrapper[4651]: I1011 05:05:31.598295 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vgwd5" Oct 11 05:05:31 crc kubenswrapper[4651]: I1011 05:05:31.694350 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vgwd5" Oct 11 05:05:31 crc kubenswrapper[4651]: I1011 05:05:31.829564 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vgwd5"] Oct 11 05:05:33 crc kubenswrapper[4651]: I1011 05:05:33.661589 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-vgwd5" podUID="83ff9f8d-04fd-4fa5-894e-4e63202fae9c" containerName="registry-server" containerID="cri-o://89969c18e35c60773727dbd85dfcf4c4e00a02f86be656e6e8e8427829cd0c09" gracePeriod=2 Oct 11 05:05:34 
crc kubenswrapper[4651]: I1011 05:05:34.670846 4651 generic.go:334] "Generic (PLEG): container finished" podID="83ff9f8d-04fd-4fa5-894e-4e63202fae9c" containerID="89969c18e35c60773727dbd85dfcf4c4e00a02f86be656e6e8e8427829cd0c09" exitCode=0 Oct 11 05:05:34 crc kubenswrapper[4651]: I1011 05:05:34.670872 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vgwd5" event={"ID":"83ff9f8d-04fd-4fa5-894e-4e63202fae9c","Type":"ContainerDied","Data":"89969c18e35c60773727dbd85dfcf4c4e00a02f86be656e6e8e8427829cd0c09"} Oct 11 05:05:37 crc kubenswrapper[4651]: I1011 05:05:37.697436 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-dfqdz" event={"ID":"bd36db45-dfcf-4d27-8bfb-fcefeff7f0ba","Type":"ContainerStarted","Data":"8a9ccdd7f0c4a115423750991a42342eafe6cacb48f18854cc88ae0b35ab48e5"} Oct 11 05:05:37 crc kubenswrapper[4651]: I1011 05:05:37.705724 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-646675d848-7w8pk" event={"ID":"9bfc9d92-8b6d-4b13-9759-b7185e1f16bb","Type":"ContainerStarted","Data":"60b24a3c0cf31dd50b88310e7bb4a4812f3be283a72e472ffd22e80910a68c30"} Oct 11 05:05:37 crc kubenswrapper[4651]: I1011 05:05:37.713113 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-8678f847b6-vpnkk" event={"ID":"50a25e99-d2ec-4b16-a5fa-894e79ee528e","Type":"ContainerStarted","Data":"08e8d88a208950c5af0ef72a32feeaba2535748caff8d05c199d23173b91142f"} Oct 11 05:05:37 crc kubenswrapper[4651]: I1011 05:05:37.713324 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-8678f847b6-vpnkk" Oct 11 05:05:37 crc kubenswrapper[4651]: I1011 05:05:37.718656 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-vmqd8" event={"ID":"f4175bba-9aae-4faf-8670-f612f867827e","Type":"ContainerStarted","Data":"fe5934b49f1252f6421b73516dcce5460195a07625d129a1ac333fac6c0f7e7b"} Oct 11 05:05:37 crc kubenswrapper[4651]: I1011 05:05:37.719269 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-vmqd8" Oct 11 05:05:37 crc kubenswrapper[4651]: I1011 05:05:37.720577 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-dfqdz" podStartSLOduration=7.090154945 podStartE2EDuration="27.720564467s" podCreationTimestamp="2025-10-11 05:05:10 +0000 UTC" firstStartedPulling="2025-10-11 05:05:12.195133377 +0000 UTC m=+833.091366173" lastFinishedPulling="2025-10-11 05:05:32.825542889 +0000 UTC m=+853.721775695" observedRunningTime="2025-10-11 05:05:37.716757761 +0000 UTC m=+858.612990567" watchObservedRunningTime="2025-10-11 05:05:37.720564467 +0000 UTC m=+858.616797263" Oct 11 05:05:37 crc kubenswrapper[4651]: I1011 05:05:37.722162 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-p9d4d" event={"ID":"0e634116-91fc-4fad-b906-a998e77ea3e4","Type":"ContainerStarted","Data":"a6983186b8034054ef0bd6c3299c03860ed77bff1d206d383f63edc26f9d4880"} Oct 11 05:05:37 crc kubenswrapper[4651]: I1011 05:05:37.722388 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-p9d4d" Oct 11 05:05:37 crc kubenswrapper[4651]: I1011 05:05:37.734903 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-8678f847b6-vpnkk" podStartSLOduration=7.097765059 podStartE2EDuration="27.734888542s" podCreationTimestamp="2025-10-11 05:05:10 +0000 UTC" firstStartedPulling="2025-10-11 05:05:12.189311919 +0000 UTC m=+833.085544715" lastFinishedPulling="2025-10-11 05:05:32.826435402 +0000 UTC m=+853.722668198" observedRunningTime="2025-10-11 05:05:37.733514397 +0000 UTC m=+858.629747203" watchObservedRunningTime="2025-10-11 05:05:37.734888542 +0000 UTC m=+858.631121338" Oct 11 05:05:37 crc kubenswrapper[4651]: I1011 05:05:37.796038 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-vmqd8" podStartSLOduration=7.096716185 podStartE2EDuration="27.796014117s" podCreationTimestamp="2025-10-11 05:05:10 +0000 UTC" firstStartedPulling="2025-10-11 05:05:12.153160402 +0000 UTC m=+833.049393198" lastFinishedPulling="2025-10-11 05:05:32.852458324 +0000 UTC m=+853.748691130" observedRunningTime="2025-10-11 05:05:37.760593176 +0000 UTC m=+858.656825982" watchObservedRunningTime="2025-10-11 05:05:37.796014117 +0000 UTC m=+858.692246913" Oct 11 05:05:37 crc kubenswrapper[4651]: I1011 05:05:37.803957 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-p9d4d" podStartSLOduration=7.167541756 podStartE2EDuration="27.803920129s" podCreationTimestamp="2025-10-11 05:05:10 +0000 UTC" firstStartedPulling="2025-10-11 05:05:12.189162966 +0000 UTC m=+833.085395762" lastFinishedPulling="2025-10-11 05:05:32.825541339 +0000 UTC m=+853.721774135" observedRunningTime="2025-10-11 05:05:37.791572534 +0000 UTC m=+858.687805350" watchObservedRunningTime="2025-10-11 05:05:37.803920129 +0000 UTC m=+858.700152945" Oct 11 05:05:37 crc kubenswrapper[4651]: I1011 05:05:37.997235 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vgwd5" Oct 11 05:05:38 crc kubenswrapper[4651]: I1011 05:05:38.179630 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83ff9f8d-04fd-4fa5-894e-4e63202fae9c-utilities\") pod \"83ff9f8d-04fd-4fa5-894e-4e63202fae9c\" (UID: \"83ff9f8d-04fd-4fa5-894e-4e63202fae9c\") " Oct 11 05:05:38 crc kubenswrapper[4651]: I1011 05:05:38.179729 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83ff9f8d-04fd-4fa5-894e-4e63202fae9c-catalog-content\") pod \"83ff9f8d-04fd-4fa5-894e-4e63202fae9c\" (UID: \"83ff9f8d-04fd-4fa5-894e-4e63202fae9c\") " Oct 11 05:05:38 crc kubenswrapper[4651]: I1011 05:05:38.179802 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8pxpw\" (UniqueName: \"kubernetes.io/projected/83ff9f8d-04fd-4fa5-894e-4e63202fae9c-kube-api-access-8pxpw\") pod \"83ff9f8d-04fd-4fa5-894e-4e63202fae9c\" (UID: \"83ff9f8d-04fd-4fa5-894e-4e63202fae9c\") " Oct 11 05:05:38 crc kubenswrapper[4651]: I1011 05:05:38.180579 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83ff9f8d-04fd-4fa5-894e-4e63202fae9c-utilities" (OuterVolumeSpecName: "utilities") pod "83ff9f8d-04fd-4fa5-894e-4e63202fae9c" (UID: "83ff9f8d-04fd-4fa5-894e-4e63202fae9c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:05:38 crc kubenswrapper[4651]: I1011 05:05:38.187175 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83ff9f8d-04fd-4fa5-894e-4e63202fae9c-kube-api-access-8pxpw" (OuterVolumeSpecName: "kube-api-access-8pxpw") pod "83ff9f8d-04fd-4fa5-894e-4e63202fae9c" (UID: "83ff9f8d-04fd-4fa5-894e-4e63202fae9c"). InnerVolumeSpecName "kube-api-access-8pxpw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:05:38 crc kubenswrapper[4651]: I1011 05:05:38.238844 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83ff9f8d-04fd-4fa5-894e-4e63202fae9c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "83ff9f8d-04fd-4fa5-894e-4e63202fae9c" (UID: "83ff9f8d-04fd-4fa5-894e-4e63202fae9c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:05:38 crc kubenswrapper[4651]: I1011 05:05:38.281625 4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83ff9f8d-04fd-4fa5-894e-4e63202fae9c-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 05:05:38 crc kubenswrapper[4651]: I1011 05:05:38.281652 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8pxpw\" (UniqueName: \"kubernetes.io/projected/83ff9f8d-04fd-4fa5-894e-4e63202fae9c-kube-api-access-8pxpw\") on node \"crc\" DevicePath \"\"" Oct 11 05:05:38 crc kubenswrapper[4651]: I1011 05:05:38.281663 4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83ff9f8d-04fd-4fa5-894e-4e63202fae9c-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 05:05:38 crc kubenswrapper[4651]: I1011 05:05:38.730789 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vgwd5" event={"ID":"83ff9f8d-04fd-4fa5-894e-4e63202fae9c","Type":"ContainerDied","Data":"963faab473e51d92eedf8a8af3d2d9e5150bdedea2aa8a90b8e9daac52eec6ca"} Oct 11 05:05:38 crc kubenswrapper[4651]: I1011 05:05:38.730908 4651 scope.go:117] "RemoveContainer" containerID="89969c18e35c60773727dbd85dfcf4c4e00a02f86be656e6e8e8427829cd0c09" Oct 11 05:05:38 crc kubenswrapper[4651]: I1011 05:05:38.730932 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vgwd5" Oct 11 05:05:38 crc kubenswrapper[4651]: I1011 05:05:38.753601 4651 scope.go:117] "RemoveContainer" containerID="4e77a31da7d465fa4ae8c5b487535266bfc866bdf12a0225b4002809e79b66de" Oct 11 05:05:38 crc kubenswrapper[4651]: I1011 05:05:38.754338 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-646675d848-7w8pk" podStartSLOduration=8.11874795 podStartE2EDuration="28.754322603s" podCreationTimestamp="2025-10-11 05:05:10 +0000 UTC" firstStartedPulling="2025-10-11 05:05:12.190627033 +0000 UTC m=+833.086859829" lastFinishedPulling="2025-10-11 05:05:32.826201686 +0000 UTC m=+853.722434482" observedRunningTime="2025-10-11 05:05:38.748255528 +0000 UTC m=+859.644488334" watchObservedRunningTime="2025-10-11 05:05:38.754322603 +0000 UTC m=+859.650555399" Oct 11 05:05:38 crc kubenswrapper[4651]: I1011 05:05:38.778723 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vgwd5"] Oct 11 05:05:38 crc kubenswrapper[4651]: I1011 05:05:38.782932 4651 scope.go:117] "RemoveContainer" containerID="ee38af39da309ac26a2866498f1a82f3998b8dfdab99753aa431db5c7baa1243" Oct 11 05:05:38 crc kubenswrapper[4651]: I1011 05:05:38.784436 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-vgwd5"] Oct 11 05:05:39 crc kubenswrapper[4651]: I1011 05:05:39.213050 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-bwlhb" Oct 11 05:05:39 crc kubenswrapper[4651]: I1011 05:05:39.285665 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-bwlhb" Oct 11 05:05:39 crc kubenswrapper[4651]: I1011 05:05:39.886858 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83ff9f8d-04fd-4fa5-894e-4e63202fae9c" path="/var/lib/kubelet/pods/83ff9f8d-04fd-4fa5-894e-4e63202fae9c/volumes" Oct 
11 05:05:40 crc kubenswrapper[4651]: I1011 05:05:40.234702 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bwlhb"] Oct 11 05:05:40 crc kubenswrapper[4651]: I1011 05:05:40.721031 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-xgxfd" Oct 11 05:05:40 crc kubenswrapper[4651]: I1011 05:05:40.745568 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-bwlhb" podUID="15f267a4-a699-4088-8769-7922096fc774" containerName="registry-server" containerID="cri-o://1cf04bdd59ca2ee81f2751b6f6d3c3f4601a913c68e86149e0aa698f7e368fc5" gracePeriod=2 Oct 11 05:05:41 crc kubenswrapper[4651]: I1011 05:05:41.133003 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bwlhb" Oct 11 05:05:41 crc kubenswrapper[4651]: I1011 05:05:41.161688 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-646675d848-7w8pk" Oct 11 05:05:41 crc kubenswrapper[4651]: I1011 05:05:41.322658 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wn6l8\" (UniqueName: \"kubernetes.io/projected/15f267a4-a699-4088-8769-7922096fc774-kube-api-access-wn6l8\") pod \"15f267a4-a699-4088-8769-7922096fc774\" (UID: \"15f267a4-a699-4088-8769-7922096fc774\") " Oct 11 05:05:41 crc kubenswrapper[4651]: I1011 05:05:41.322837 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15f267a4-a699-4088-8769-7922096fc774-utilities\") pod \"15f267a4-a699-4088-8769-7922096fc774\" (UID: \"15f267a4-a699-4088-8769-7922096fc774\") " Oct 11 05:05:41 crc kubenswrapper[4651]: I1011 05:05:41.322869 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15f267a4-a699-4088-8769-7922096fc774-catalog-content\") pod \"15f267a4-a699-4088-8769-7922096fc774\" (UID: \"15f267a4-a699-4088-8769-7922096fc774\") " Oct 11 05:05:41 crc kubenswrapper[4651]: I1011 05:05:41.324487 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15f267a4-a699-4088-8769-7922096fc774-utilities" (OuterVolumeSpecName: "utilities") pod "15f267a4-a699-4088-8769-7922096fc774" (UID: "15f267a4-a699-4088-8769-7922096fc774"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:05:41 crc kubenswrapper[4651]: I1011 05:05:41.340527 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15f267a4-a699-4088-8769-7922096fc774-kube-api-access-wn6l8" (OuterVolumeSpecName: "kube-api-access-wn6l8") pod "15f267a4-a699-4088-8769-7922096fc774" (UID: "15f267a4-a699-4088-8769-7922096fc774"). InnerVolumeSpecName "kube-api-access-wn6l8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:05:41 crc kubenswrapper[4651]: I1011 05:05:41.410232 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757dxbxc6" Oct 11 05:05:41 crc kubenswrapper[4651]: I1011 05:05:41.425545 4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15f267a4-a699-4088-8769-7922096fc774-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 05:05:41 crc kubenswrapper[4651]: I1011 05:05:41.425569 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wn6l8\" (UniqueName: \"kubernetes.io/projected/15f267a4-a699-4088-8769-7922096fc774-kube-api-access-wn6l8\") on node \"crc\" DevicePath \"\"" Oct 11 05:05:41 crc kubenswrapper[4651]: I1011 05:05:41.428368 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15f267a4-a699-4088-8769-7922096fc774-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "15f267a4-a699-4088-8769-7922096fc774" (UID: "15f267a4-a699-4088-8769-7922096fc774"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:05:41 crc kubenswrapper[4651]: I1011 05:05:41.526792 4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15f267a4-a699-4088-8769-7922096fc774-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 05:05:41 crc kubenswrapper[4651]: I1011 05:05:41.757294 4651 generic.go:334] "Generic (PLEG): container finished" podID="15f267a4-a699-4088-8769-7922096fc774" containerID="1cf04bdd59ca2ee81f2751b6f6d3c3f4601a913c68e86149e0aa698f7e368fc5" exitCode=0 Oct 11 05:05:41 crc kubenswrapper[4651]: I1011 05:05:41.757373 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bwlhb" Oct 11 05:05:41 crc kubenswrapper[4651]: I1011 05:05:41.757391 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bwlhb" event={"ID":"15f267a4-a699-4088-8769-7922096fc774","Type":"ContainerDied","Data":"1cf04bdd59ca2ee81f2751b6f6d3c3f4601a913c68e86149e0aa698f7e368fc5"} Oct 11 05:05:41 crc kubenswrapper[4651]: I1011 05:05:41.757984 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bwlhb" event={"ID":"15f267a4-a699-4088-8769-7922096fc774","Type":"ContainerDied","Data":"785328cc5f490b88d9987c29a05c75972ac3e28ac69dd6e04aeed276c09bdf8a"} Oct 11 05:05:41 crc kubenswrapper[4651]: I1011 05:05:41.758039 4651 scope.go:117] "RemoveContainer" containerID="1cf04bdd59ca2ee81f2751b6f6d3c3f4601a913c68e86149e0aa698f7e368fc5" Oct 11 05:05:41 crc kubenswrapper[4651]: I1011 05:05:41.788704 4651 scope.go:117] "RemoveContainer" containerID="77907cc61148ca05f85ef3c0da7f0df1543a2c243db3443af791039b112ad3f5" Oct 11 05:05:41 crc kubenswrapper[4651]: I1011 05:05:41.791741 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bwlhb"] Oct 11 05:05:41 crc kubenswrapper[4651]: I1011 05:05:41.800987 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-bwlhb"] Oct 11 05:05:41 crc kubenswrapper[4651]: I1011 05:05:41.818716 4651 scope.go:117] "RemoveContainer" containerID="64927d916fefacf0e307766aa7691b95292dc09219a2ccf75df41d51dde65d09" Oct 11 05:05:41 crc kubenswrapper[4651]: I1011 05:05:41.843907 4651 scope.go:117] "RemoveContainer" containerID="1cf04bdd59ca2ee81f2751b6f6d3c3f4601a913c68e86149e0aa698f7e368fc5" Oct 11 05:05:41 crc kubenswrapper[4651]: E1011 05:05:41.846527 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1cf04bdd59ca2ee81f2751b6f6d3c3f4601a913c68e86149e0aa698f7e368fc5\": container with ID starting with 1cf04bdd59ca2ee81f2751b6f6d3c3f4601a913c68e86149e0aa698f7e368fc5 not found: ID does not exist" containerID="1cf04bdd59ca2ee81f2751b6f6d3c3f4601a913c68e86149e0aa698f7e368fc5" Oct 11 05:05:41 crc kubenswrapper[4651]: I1011 05:05:41.846603 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1cf04bdd59ca2ee81f2751b6f6d3c3f4601a913c68e86149e0aa698f7e368fc5"} err="failed to get container status \"1cf04bdd59ca2ee81f2751b6f6d3c3f4601a913c68e86149e0aa698f7e368fc5\": rpc error: code = NotFound desc = could not find container \"1cf04bdd59ca2ee81f2751b6f6d3c3f4601a913c68e86149e0aa698f7e368fc5\": container with ID starting with 1cf04bdd59ca2ee81f2751b6f6d3c3f4601a913c68e86149e0aa698f7e368fc5 not found: ID does not exist" Oct 11 05:05:41 crc kubenswrapper[4651]: I1011 05:05:41.846643 4651 scope.go:117] "RemoveContainer" containerID="77907cc61148ca05f85ef3c0da7f0df1543a2c243db3443af791039b112ad3f5" Oct 11 05:05:41 crc kubenswrapper[4651]: E1011 05:05:41.847308 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"77907cc61148ca05f85ef3c0da7f0df1543a2c243db3443af791039b112ad3f5\": container with ID starting with 77907cc61148ca05f85ef3c0da7f0df1543a2c243db3443af791039b112ad3f5 not found: ID does not exist" containerID="77907cc61148ca05f85ef3c0da7f0df1543a2c243db3443af791039b112ad3f5" Oct 11 05:05:41 crc kubenswrapper[4651]: I1011 05:05:41.847403 4651 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77907cc61148ca05f85ef3c0da7f0df1543a2c243db3443af791039b112ad3f5"} err="failed to get container status \"77907cc61148ca05f85ef3c0da7f0df1543a2c243db3443af791039b112ad3f5\": rpc error: code = NotFound desc = could not find container \"77907cc61148ca05f85ef3c0da7f0df1543a2c243db3443af791039b112ad3f5\": container with ID starting with 77907cc61148ca05f85ef3c0da7f0df1543a2c243db3443af791039b112ad3f5 not found: ID does not exist" Oct 11 05:05:41 crc kubenswrapper[4651]: I1011 05:05:41.847479 4651 scope.go:117] "RemoveContainer" containerID="64927d916fefacf0e307766aa7691b95292dc09219a2ccf75df41d51dde65d09" Oct 11 05:05:41 crc kubenswrapper[4651]: E1011 05:05:41.847956 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"64927d916fefacf0e307766aa7691b95292dc09219a2ccf75df41d51dde65d09\": container with ID starting with 64927d916fefacf0e307766aa7691b95292dc09219a2ccf75df41d51dde65d09 not found: ID does not exist" containerID="64927d916fefacf0e307766aa7691b95292dc09219a2ccf75df41d51dde65d09" Oct 11 05:05:41 crc kubenswrapper[4651]: I1011 05:05:41.847998 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64927d916fefacf0e307766aa7691b95292dc09219a2ccf75df41d51dde65d09"} err="failed to get container status \"64927d916fefacf0e307766aa7691b95292dc09219a2ccf75df41d51dde65d09\": rpc error: code = NotFound desc = could not find container \"64927d916fefacf0e307766aa7691b95292dc09219a2ccf75df41d51dde65d09\": container with ID starting with 64927d916fefacf0e307766aa7691b95292dc09219a2ccf75df41d51dde65d09 not found: ID does not exist" Oct 11 05:05:41 crc kubenswrapper[4651]: I1011 05:05:41.883860 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15f267a4-a699-4088-8769-7922096fc774" path="/var/lib/kubelet/pods/15f267a4-a699-4088-8769-7922096fc774/volumes" Oct 11 05:05:50 crc kubenswrapper[4651]: I1011 05:05:50.905037 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-vmqd8" Oct 11 05:05:51 crc kubenswrapper[4651]: I1011 05:05:51.050438 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-8678f847b6-vpnkk" Oct 11 05:05:51 crc kubenswrapper[4651]: I1011 05:05:51.125311 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-p9d4d" Oct 11 05:05:51 crc kubenswrapper[4651]: I1011 05:05:51.163701 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-646675d848-7w8pk" Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.536767 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-9v2w4"] Oct 11 05:06:06 crc kubenswrapper[4651]: E1011 05:06:06.541232 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15f267a4-a699-4088-8769-7922096fc774" containerName="registry-server" Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.541252 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="15f267a4-a699-4088-8769-7922096fc774" containerName="registry-server" Oct 11 05:06:06 crc kubenswrapper[4651]: E1011 05:06:06.541301 4651 cpu_manager.go:410] "RemoveStaleState: removing container" 
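[editor's note] The three "DeleteContainer returned error" lines above are benign: the pod was already torn down, so the follow-up ContainerStatus probe gets NotFound from CRI-O, and deletion stays idempotent. A minimal Go sketch of that pattern (assuming google.golang.org/grpc is available; hypothetical helper, not kubelet source):

```go
package main

import (
	"errors"
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// ignoreNotFound treats a CRI "container not found" RPC error as success,
// so a cleanup path can be retried safely after the runtime has already
// pruned the container. Illustrative only.
func ignoreNotFound(err error) error {
	if err == nil {
		return nil
	}
	if s, ok := status.FromError(err); ok && s.Code() == codes.NotFound {
		return nil // already gone: deletion is idempotent
	}
	return err
}

func main() {
	gone := status.Error(codes.NotFound, "could not find container \"1cf04bdd...\"")
	fmt.Println(ignoreNotFound(gone))               // <nil>
	fmt.Println(ignoreNotFound(errors.New("boom"))) // boom
}
```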
podUID="83ff9f8d-04fd-4fa5-894e-4e63202fae9c" containerName="registry-server" Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.541310 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="83ff9f8d-04fd-4fa5-894e-4e63202fae9c" containerName="registry-server" Oct 11 05:06:06 crc kubenswrapper[4651]: E1011 05:06:06.541327 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15f267a4-a699-4088-8769-7922096fc774" containerName="extract-content" Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.541336 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="15f267a4-a699-4088-8769-7922096fc774" containerName="extract-content" Oct 11 05:06:06 crc kubenswrapper[4651]: E1011 05:06:06.541361 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15f267a4-a699-4088-8769-7922096fc774" containerName="extract-utilities" Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.541369 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="15f267a4-a699-4088-8769-7922096fc774" containerName="extract-utilities" Oct 11 05:06:06 crc kubenswrapper[4651]: E1011 05:06:06.541395 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83ff9f8d-04fd-4fa5-894e-4e63202fae9c" containerName="extract-utilities" Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.541402 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="83ff9f8d-04fd-4fa5-894e-4e63202fae9c" containerName="extract-utilities" Oct 11 05:06:06 crc kubenswrapper[4651]: E1011 05:06:06.541413 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83ff9f8d-04fd-4fa5-894e-4e63202fae9c" containerName="extract-content" Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.541421 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="83ff9f8d-04fd-4fa5-894e-4e63202fae9c" containerName="extract-content" Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.541604 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="83ff9f8d-04fd-4fa5-894e-4e63202fae9c" containerName="registry-server" Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.541618 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="15f267a4-a699-4088-8769-7922096fc774" containerName="registry-server" Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.542511 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-9v2w4" Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.544432 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.544432 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-2qxrv" Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.544431 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.545582 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.546049 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-9v2w4"] Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.594053 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-plnsm"] Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.595434 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-plnsm" Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.600032 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.607648 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-plnsm"] Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.609479 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qh7vw\" (UniqueName: \"kubernetes.io/projected/742f177f-d880-4e28-a826-e2e565ebef2e-kube-api-access-qh7vw\") pod \"dnsmasq-dns-675f4bcbfc-9v2w4\" (UID: \"742f177f-d880-4e28-a826-e2e565ebef2e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-9v2w4" Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.609550 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/742f177f-d880-4e28-a826-e2e565ebef2e-config\") pod \"dnsmasq-dns-675f4bcbfc-9v2w4\" (UID: \"742f177f-d880-4e28-a826-e2e565ebef2e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-9v2w4" Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.710347 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/742f177f-d880-4e28-a826-e2e565ebef2e-config\") pod \"dnsmasq-dns-675f4bcbfc-9v2w4\" (UID: \"742f177f-d880-4e28-a826-e2e565ebef2e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-9v2w4" Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.710417 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/83b871e5-d44b-4c41-88ab-f8f04b76d16f-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-plnsm\" (UID: \"83b871e5-d44b-4c41-88ab-f8f04b76d16f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-plnsm" Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.710478 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rlkct\" (UniqueName: \"kubernetes.io/projected/83b871e5-d44b-4c41-88ab-f8f04b76d16f-kube-api-access-rlkct\") pod \"dnsmasq-dns-78dd6ddcc-plnsm\" (UID: \"83b871e5-d44b-4c41-88ab-f8f04b76d16f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-plnsm" Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.710517 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qh7vw\" (UniqueName: \"kubernetes.io/projected/742f177f-d880-4e28-a826-e2e565ebef2e-kube-api-access-qh7vw\") pod \"dnsmasq-dns-675f4bcbfc-9v2w4\" (UID: \"742f177f-d880-4e28-a826-e2e565ebef2e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-9v2w4" Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.710548 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/83b871e5-d44b-4c41-88ab-f8f04b76d16f-config\") pod \"dnsmasq-dns-78dd6ddcc-plnsm\" (UID: \"83b871e5-d44b-4c41-88ab-f8f04b76d16f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-plnsm" Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.711256 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/742f177f-d880-4e28-a826-e2e565ebef2e-config\") pod \"dnsmasq-dns-675f4bcbfc-9v2w4\" (UID: \"742f177f-d880-4e28-a826-e2e565ebef2e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-9v2w4" Oct 11 
05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.734855 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qh7vw\" (UniqueName: \"kubernetes.io/projected/742f177f-d880-4e28-a826-e2e565ebef2e-kube-api-access-qh7vw\") pod \"dnsmasq-dns-675f4bcbfc-9v2w4\" (UID: \"742f177f-d880-4e28-a826-e2e565ebef2e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-9v2w4" Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.812038 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/83b871e5-d44b-4c41-88ab-f8f04b76d16f-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-plnsm\" (UID: \"83b871e5-d44b-4c41-88ab-f8f04b76d16f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-plnsm" Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.812128 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlkct\" (UniqueName: \"kubernetes.io/projected/83b871e5-d44b-4c41-88ab-f8f04b76d16f-kube-api-access-rlkct\") pod \"dnsmasq-dns-78dd6ddcc-plnsm\" (UID: \"83b871e5-d44b-4c41-88ab-f8f04b76d16f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-plnsm" Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.812181 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/83b871e5-d44b-4c41-88ab-f8f04b76d16f-config\") pod \"dnsmasq-dns-78dd6ddcc-plnsm\" (UID: \"83b871e5-d44b-4c41-88ab-f8f04b76d16f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-plnsm" Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.813057 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/83b871e5-d44b-4c41-88ab-f8f04b76d16f-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-plnsm\" (UID: \"83b871e5-d44b-4c41-88ab-f8f04b76d16f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-plnsm" Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.813133 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/83b871e5-d44b-4c41-88ab-f8f04b76d16f-config\") pod \"dnsmasq-dns-78dd6ddcc-plnsm\" (UID: \"83b871e5-d44b-4c41-88ab-f8f04b76d16f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-plnsm" Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.828498 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rlkct\" (UniqueName: \"kubernetes.io/projected/83b871e5-d44b-4c41-88ab-f8f04b76d16f-kube-api-access-rlkct\") pod \"dnsmasq-dns-78dd6ddcc-plnsm\" (UID: \"83b871e5-d44b-4c41-88ab-f8f04b76d16f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-plnsm" Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.859689 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-9v2w4" Oct 11 05:06:06 crc kubenswrapper[4651]: I1011 05:06:06.913443 4651 util.go:30] "No sandbox for pod can be found. 
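[editor's note] The VerifyControllerAttachedVolume / MountVolume started / MountVolume.SetUp succeeded triplets above are the kubelet's desired-state-vs-actual-state volume reconciliation. A toy Go sketch of that loop under assumed, simplified types (not the real operationExecutor, which runs mounts asynchronously):

```go
package main

import "fmt"

// volume is a (pod, volume name) pair from the desired state of world.
type volume struct{ name, pod string }

// reconcile mounts every desired volume that is not yet in the actual
// state, echoing the log's three-step sequence. Illustrative only.
func reconcile(desired []volume, actual map[string]bool) {
	for _, v := range desired {
		key := v.pod + "/" + v.name
		if actual[key] {
			continue // already mounted; nothing to do
		}
		fmt.Printf("VerifyControllerAttachedVolume started for %q\n", key)
		fmt.Printf("MountVolume started for %q\n", key)
		actual[key] = true // the real executor does this after the mount succeeds
		fmt.Printf("MountVolume.SetUp succeeded for %q\n", key)
	}
}

func main() {
	desired := []volume{
		{"config", "dnsmasq-dns-675f4bcbfc-9v2w4"},
		{"kube-api-access-qh7vw", "dnsmasq-dns-675f4bcbfc-9v2w4"},
	}
	reconcile(desired, map[string]bool{})
}
```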
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-plnsm" Oct 11 05:06:07 crc kubenswrapper[4651]: I1011 05:06:07.314300 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-9v2w4"] Oct 11 05:06:07 crc kubenswrapper[4651]: I1011 05:06:07.324435 4651 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 11 05:06:07 crc kubenswrapper[4651]: I1011 05:06:07.366767 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-plnsm"] Oct 11 05:06:07 crc kubenswrapper[4651]: I1011 05:06:07.993360 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-9v2w4" event={"ID":"742f177f-d880-4e28-a826-e2e565ebef2e","Type":"ContainerStarted","Data":"eb73564d2b2af5e48c1246ceba6213701e2330f5175756502730409f0b10e809"} Oct 11 05:06:07 crc kubenswrapper[4651]: I1011 05:06:07.995681 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-plnsm" event={"ID":"83b871e5-d44b-4c41-88ab-f8f04b76d16f","Type":"ContainerStarted","Data":"c6cdce3beecff0bc1390173e93f846c11a1756d10f3e3d52899ce9b12f3dfa8f"} Oct 11 05:06:09 crc kubenswrapper[4651]: I1011 05:06:09.438027 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-9v2w4"] Oct 11 05:06:09 crc kubenswrapper[4651]: I1011 05:06:09.465621 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-zm69b"] Oct 11 05:06:09 crc kubenswrapper[4651]: I1011 05:06:09.466743 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-zm69b" Oct 11 05:06:09 crc kubenswrapper[4651]: I1011 05:06:09.487656 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-zm69b"] Oct 11 05:06:09 crc kubenswrapper[4651]: I1011 05:06:09.551001 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7855690d-2913-4d7b-81de-f34a5f299431-config\") pod \"dnsmasq-dns-666b6646f7-zm69b\" (UID: \"7855690d-2913-4d7b-81de-f34a5f299431\") " pod="openstack/dnsmasq-dns-666b6646f7-zm69b" Oct 11 05:06:09 crc kubenswrapper[4651]: I1011 05:06:09.551073 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7855690d-2913-4d7b-81de-f34a5f299431-dns-svc\") pod \"dnsmasq-dns-666b6646f7-zm69b\" (UID: \"7855690d-2913-4d7b-81de-f34a5f299431\") " pod="openstack/dnsmasq-dns-666b6646f7-zm69b" Oct 11 05:06:09 crc kubenswrapper[4651]: I1011 05:06:09.551250 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6fdq5\" (UniqueName: \"kubernetes.io/projected/7855690d-2913-4d7b-81de-f34a5f299431-kube-api-access-6fdq5\") pod \"dnsmasq-dns-666b6646f7-zm69b\" (UID: \"7855690d-2913-4d7b-81de-f34a5f299431\") " pod="openstack/dnsmasq-dns-666b6646f7-zm69b" Oct 11 05:06:09 crc kubenswrapper[4651]: I1011 05:06:09.652331 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7855690d-2913-4d7b-81de-f34a5f299431-config\") pod \"dnsmasq-dns-666b6646f7-zm69b\" (UID: \"7855690d-2913-4d7b-81de-f34a5f299431\") " pod="openstack/dnsmasq-dns-666b6646f7-zm69b" Oct 11 05:06:09 crc kubenswrapper[4651]: I1011 05:06:09.652406 4651 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7855690d-2913-4d7b-81de-f34a5f299431-dns-svc\") pod \"dnsmasq-dns-666b6646f7-zm69b\" (UID: \"7855690d-2913-4d7b-81de-f34a5f299431\") " pod="openstack/dnsmasq-dns-666b6646f7-zm69b" Oct 11 05:06:09 crc kubenswrapper[4651]: I1011 05:06:09.652449 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6fdq5\" (UniqueName: \"kubernetes.io/projected/7855690d-2913-4d7b-81de-f34a5f299431-kube-api-access-6fdq5\") pod \"dnsmasq-dns-666b6646f7-zm69b\" (UID: \"7855690d-2913-4d7b-81de-f34a5f299431\") " pod="openstack/dnsmasq-dns-666b6646f7-zm69b" Oct 11 05:06:09 crc kubenswrapper[4651]: I1011 05:06:09.653449 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7855690d-2913-4d7b-81de-f34a5f299431-config\") pod \"dnsmasq-dns-666b6646f7-zm69b\" (UID: \"7855690d-2913-4d7b-81de-f34a5f299431\") " pod="openstack/dnsmasq-dns-666b6646f7-zm69b" Oct 11 05:06:09 crc kubenswrapper[4651]: I1011 05:06:09.654039 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7855690d-2913-4d7b-81de-f34a5f299431-dns-svc\") pod \"dnsmasq-dns-666b6646f7-zm69b\" (UID: \"7855690d-2913-4d7b-81de-f34a5f299431\") " pod="openstack/dnsmasq-dns-666b6646f7-zm69b" Oct 11 05:06:09 crc kubenswrapper[4651]: I1011 05:06:09.676341 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6fdq5\" (UniqueName: \"kubernetes.io/projected/7855690d-2913-4d7b-81de-f34a5f299431-kube-api-access-6fdq5\") pod \"dnsmasq-dns-666b6646f7-zm69b\" (UID: \"7855690d-2913-4d7b-81de-f34a5f299431\") " pod="openstack/dnsmasq-dns-666b6646f7-zm69b" Oct 11 05:06:09 crc kubenswrapper[4651]: I1011 05:06:09.733538 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-plnsm"] Oct 11 05:06:09 crc kubenswrapper[4651]: I1011 05:06:09.761462 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-89rdc"] Oct 11 05:06:09 crc kubenswrapper[4651]: I1011 05:06:09.762769 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-89rdc" Oct 11 05:06:09 crc kubenswrapper[4651]: I1011 05:06:09.766890 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-89rdc"] Oct 11 05:06:09 crc kubenswrapper[4651]: I1011 05:06:09.796662 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-zm69b" Oct 11 05:06:09 crc kubenswrapper[4651]: I1011 05:06:09.854266 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca84ab98-19fd-4cbf-ab28-acb4fced285f-config\") pod \"dnsmasq-dns-57d769cc4f-89rdc\" (UID: \"ca84ab98-19fd-4cbf-ab28-acb4fced285f\") " pod="openstack/dnsmasq-dns-57d769cc4f-89rdc" Oct 11 05:06:09 crc kubenswrapper[4651]: I1011 05:06:09.854333 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnw9t\" (UniqueName: \"kubernetes.io/projected/ca84ab98-19fd-4cbf-ab28-acb4fced285f-kube-api-access-vnw9t\") pod \"dnsmasq-dns-57d769cc4f-89rdc\" (UID: \"ca84ab98-19fd-4cbf-ab28-acb4fced285f\") " pod="openstack/dnsmasq-dns-57d769cc4f-89rdc" Oct 11 05:06:09 crc kubenswrapper[4651]: I1011 05:06:09.854373 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ca84ab98-19fd-4cbf-ab28-acb4fced285f-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-89rdc\" (UID: \"ca84ab98-19fd-4cbf-ab28-acb4fced285f\") " pod="openstack/dnsmasq-dns-57d769cc4f-89rdc" Oct 11 05:06:09 crc kubenswrapper[4651]: I1011 05:06:09.957086 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca84ab98-19fd-4cbf-ab28-acb4fced285f-config\") pod \"dnsmasq-dns-57d769cc4f-89rdc\" (UID: \"ca84ab98-19fd-4cbf-ab28-acb4fced285f\") " pod="openstack/dnsmasq-dns-57d769cc4f-89rdc" Oct 11 05:06:09 crc kubenswrapper[4651]: I1011 05:06:09.957477 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnw9t\" (UniqueName: \"kubernetes.io/projected/ca84ab98-19fd-4cbf-ab28-acb4fced285f-kube-api-access-vnw9t\") pod \"dnsmasq-dns-57d769cc4f-89rdc\" (UID: \"ca84ab98-19fd-4cbf-ab28-acb4fced285f\") " pod="openstack/dnsmasq-dns-57d769cc4f-89rdc" Oct 11 05:06:09 crc kubenswrapper[4651]: I1011 05:06:09.957543 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ca84ab98-19fd-4cbf-ab28-acb4fced285f-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-89rdc\" (UID: \"ca84ab98-19fd-4cbf-ab28-acb4fced285f\") " pod="openstack/dnsmasq-dns-57d769cc4f-89rdc" Oct 11 05:06:09 crc kubenswrapper[4651]: I1011 05:06:09.958521 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ca84ab98-19fd-4cbf-ab28-acb4fced285f-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-89rdc\" (UID: \"ca84ab98-19fd-4cbf-ab28-acb4fced285f\") " pod="openstack/dnsmasq-dns-57d769cc4f-89rdc" Oct 11 05:06:09 crc kubenswrapper[4651]: I1011 05:06:09.959195 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca84ab98-19fd-4cbf-ab28-acb4fced285f-config\") pod \"dnsmasq-dns-57d769cc4f-89rdc\" (UID: \"ca84ab98-19fd-4cbf-ab28-acb4fced285f\") " pod="openstack/dnsmasq-dns-57d769cc4f-89rdc" Oct 11 05:06:09 crc kubenswrapper[4651]: I1011 05:06:09.977558 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnw9t\" (UniqueName: \"kubernetes.io/projected/ca84ab98-19fd-4cbf-ab28-acb4fced285f-kube-api-access-vnw9t\") pod \"dnsmasq-dns-57d769cc4f-89rdc\" (UID: \"ca84ab98-19fd-4cbf-ab28-acb4fced285f\") " 
pod="openstack/dnsmasq-dns-57d769cc4f-89rdc" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.078017 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-89rdc" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.607639 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.609507 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.611723 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.612026 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.612197 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-jwjmf" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.612327 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.612354 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.612464 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.614971 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.646443 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.671126 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e73b125b-a52b-44bb-bbed-3a484f53a9cb-config-data\") pod \"rabbitmq-server-0\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.671184 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e73b125b-a52b-44bb-bbed-3a484f53a9cb-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.671213 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e73b125b-a52b-44bb-bbed-3a484f53a9cb-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.671247 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e73b125b-a52b-44bb-bbed-3a484f53a9cb-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.671277 4651 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e73b125b-a52b-44bb-bbed-3a484f53a9cb-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.671297 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e73b125b-a52b-44bb-bbed-3a484f53a9cb-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.671320 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e73b125b-a52b-44bb-bbed-3a484f53a9cb-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.671373 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e73b125b-a52b-44bb-bbed-3a484f53a9cb-server-conf\") pod \"rabbitmq-server-0\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.671402 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjgdc\" (UniqueName: \"kubernetes.io/projected/e73b125b-a52b-44bb-bbed-3a484f53a9cb-kube-api-access-bjgdc\") pod \"rabbitmq-server-0\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.671433 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e73b125b-a52b-44bb-bbed-3a484f53a9cb-pod-info\") pod \"rabbitmq-server-0\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.671462 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.772276 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.772342 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e73b125b-a52b-44bb-bbed-3a484f53a9cb-config-data\") pod \"rabbitmq-server-0\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.772373 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/e73b125b-a52b-44bb-bbed-3a484f53a9cb-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.772400 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e73b125b-a52b-44bb-bbed-3a484f53a9cb-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.772433 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e73b125b-a52b-44bb-bbed-3a484f53a9cb-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.772803 4651 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.773026 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e73b125b-a52b-44bb-bbed-3a484f53a9cb-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.773394 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e73b125b-a52b-44bb-bbed-3a484f53a9cb-config-data\") pod \"rabbitmq-server-0\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.773526 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e73b125b-a52b-44bb-bbed-3a484f53a9cb-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.772464 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e73b125b-a52b-44bb-bbed-3a484f53a9cb-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.773625 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e73b125b-a52b-44bb-bbed-3a484f53a9cb-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.773645 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e73b125b-a52b-44bb-bbed-3a484f53a9cb-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: 
I1011 05:06:10.773703 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e73b125b-a52b-44bb-bbed-3a484f53a9cb-server-conf\") pod \"rabbitmq-server-0\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.773728 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjgdc\" (UniqueName: \"kubernetes.io/projected/e73b125b-a52b-44bb-bbed-3a484f53a9cb-kube-api-access-bjgdc\") pod \"rabbitmq-server-0\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.773750 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e73b125b-a52b-44bb-bbed-3a484f53a9cb-pod-info\") pod \"rabbitmq-server-0\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.774078 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e73b125b-a52b-44bb-bbed-3a484f53a9cb-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.775897 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e73b125b-a52b-44bb-bbed-3a484f53a9cb-server-conf\") pod \"rabbitmq-server-0\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.778144 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e73b125b-a52b-44bb-bbed-3a484f53a9cb-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.778208 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e73b125b-a52b-44bb-bbed-3a484f53a9cb-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.780917 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e73b125b-a52b-44bb-bbed-3a484f53a9cb-pod-info\") pod \"rabbitmq-server-0\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.781494 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e73b125b-a52b-44bb-bbed-3a484f53a9cb-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.794991 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjgdc\" (UniqueName: \"kubernetes.io/projected/e73b125b-a52b-44bb-bbed-3a484f53a9cb-kube-api-access-bjgdc\") pod \"rabbitmq-server-0\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " 
pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.797832 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.869133 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.871303 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.875204 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.875248 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.875727 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.876113 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.876273 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-jjrl8" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.876599 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.876882 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.885573 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.949950 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.979053 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/dbfeee44-d2ad-4a4b-814f-916176925aaf-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.979131 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rz57r\" (UniqueName: \"kubernetes.io/projected/dbfeee44-d2ad-4a4b-814f-916176925aaf-kube-api-access-rz57r\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.979221 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.979305 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/dbfeee44-d2ad-4a4b-814f-916176925aaf-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.979342 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dbfeee44-d2ad-4a4b-814f-916176925aaf-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.979441 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/dbfeee44-d2ad-4a4b-814f-916176925aaf-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.979518 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/dbfeee44-d2ad-4a4b-814f-916176925aaf-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.979553 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/dbfeee44-d2ad-4a4b-814f-916176925aaf-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.979653 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/dbfeee44-d2ad-4a4b-814f-916176925aaf-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.979714 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/dbfeee44-d2ad-4a4b-814f-916176925aaf-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:10 crc kubenswrapper[4651]: I1011 05:06:10.979760 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/dbfeee44-d2ad-4a4b-814f-916176925aaf-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:11 crc kubenswrapper[4651]: I1011 05:06:11.081264 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/dbfeee44-d2ad-4a4b-814f-916176925aaf-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:11 crc kubenswrapper[4651]: I1011 05:06:11.081303 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dbfeee44-d2ad-4a4b-814f-916176925aaf-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:11 crc kubenswrapper[4651]: I1011 05:06:11.081343 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/dbfeee44-d2ad-4a4b-814f-916176925aaf-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:11 crc kubenswrapper[4651]: I1011 05:06:11.081370 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/dbfeee44-d2ad-4a4b-814f-916176925aaf-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:11 crc kubenswrapper[4651]: I1011 05:06:11.081388 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/dbfeee44-d2ad-4a4b-814f-916176925aaf-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:11 crc kubenswrapper[4651]: I1011 05:06:11.081427 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/dbfeee44-d2ad-4a4b-814f-916176925aaf-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:11 crc kubenswrapper[4651]: I1011 05:06:11.081448 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/dbfeee44-d2ad-4a4b-814f-916176925aaf-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:11 
crc kubenswrapper[4651]: I1011 05:06:11.081468 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/dbfeee44-d2ad-4a4b-814f-916176925aaf-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:11 crc kubenswrapper[4651]: I1011 05:06:11.081485 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/dbfeee44-d2ad-4a4b-814f-916176925aaf-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:11 crc kubenswrapper[4651]: I1011 05:06:11.081505 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rz57r\" (UniqueName: \"kubernetes.io/projected/dbfeee44-d2ad-4a4b-814f-916176925aaf-kube-api-access-rz57r\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:11 crc kubenswrapper[4651]: I1011 05:06:11.081523 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:11 crc kubenswrapper[4651]: I1011 05:06:11.081681 4651 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:11 crc kubenswrapper[4651]: I1011 05:06:11.083453 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/dbfeee44-d2ad-4a4b-814f-916176925aaf-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:11 crc kubenswrapper[4651]: I1011 05:06:11.084098 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/dbfeee44-d2ad-4a4b-814f-916176925aaf-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:11 crc kubenswrapper[4651]: I1011 05:06:11.084189 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/dbfeee44-d2ad-4a4b-814f-916176925aaf-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:11 crc kubenswrapper[4651]: I1011 05:06:11.084379 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/dbfeee44-d2ad-4a4b-814f-916176925aaf-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:11 crc kubenswrapper[4651]: I1011 05:06:11.084896 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" 
(UniqueName: \"kubernetes.io/secret/dbfeee44-d2ad-4a4b-814f-916176925aaf-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:11 crc kubenswrapper[4651]: I1011 05:06:11.085061 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dbfeee44-d2ad-4a4b-814f-916176925aaf-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:11 crc kubenswrapper[4651]: I1011 05:06:11.086758 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/dbfeee44-d2ad-4a4b-814f-916176925aaf-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:11 crc kubenswrapper[4651]: I1011 05:06:11.093616 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/dbfeee44-d2ad-4a4b-814f-916176925aaf-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:11 crc kubenswrapper[4651]: I1011 05:06:11.094477 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/dbfeee44-d2ad-4a4b-814f-916176925aaf-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:11 crc kubenswrapper[4651]: I1011 05:06:11.104754 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:11 crc kubenswrapper[4651]: I1011 05:06:11.105185 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rz57r\" (UniqueName: \"kubernetes.io/projected/dbfeee44-d2ad-4a4b-814f-916176925aaf-kube-api-access-rz57r\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:11 crc kubenswrapper[4651]: I1011 05:06:11.207992 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.377647 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.380277 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.386151 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.386484 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.386756 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.387871 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-kg6f2" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.388041 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.390554 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.393119 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.514082 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"6e4d1f39-f0c4-4a19-a525-d0119d4b77e5\") " pod="openstack/openstack-galera-0" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.514137 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e4d1f39-f0c4-4a19-a525-d0119d4b77e5-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"6e4d1f39-f0c4-4a19-a525-d0119d4b77e5\") " pod="openstack/openstack-galera-0" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.514164 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6e4d1f39-f0c4-4a19-a525-d0119d4b77e5-config-data-default\") pod \"openstack-galera-0\" (UID: \"6e4d1f39-f0c4-4a19-a525-d0119d4b77e5\") " pod="openstack/openstack-galera-0" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.514211 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6e4d1f39-f0c4-4a19-a525-d0119d4b77e5-config-data-generated\") pod \"openstack-galera-0\" (UID: \"6e4d1f39-f0c4-4a19-a525-d0119d4b77e5\") " pod="openstack/openstack-galera-0" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.514244 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6e4d1f39-f0c4-4a19-a525-d0119d4b77e5-kolla-config\") pod \"openstack-galera-0\" (UID: \"6e4d1f39-f0c4-4a19-a525-d0119d4b77e5\") " pod="openstack/openstack-galera-0" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.514271 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e4d1f39-f0c4-4a19-a525-d0119d4b77e5-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"6e4d1f39-f0c4-4a19-a525-d0119d4b77e5\") " pod="openstack/openstack-galera-0" Oct 11 
05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.514303 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/6e4d1f39-f0c4-4a19-a525-d0119d4b77e5-secrets\") pod \"openstack-galera-0\" (UID: \"6e4d1f39-f0c4-4a19-a525-d0119d4b77e5\") " pod="openstack/openstack-galera-0" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.514352 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6e4d1f39-f0c4-4a19-a525-d0119d4b77e5-operator-scripts\") pod \"openstack-galera-0\" (UID: \"6e4d1f39-f0c4-4a19-a525-d0119d4b77e5\") " pod="openstack/openstack-galera-0" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.514381 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-648kg\" (UniqueName: \"kubernetes.io/projected/6e4d1f39-f0c4-4a19-a525-d0119d4b77e5-kube-api-access-648kg\") pod \"openstack-galera-0\" (UID: \"6e4d1f39-f0c4-4a19-a525-d0119d4b77e5\") " pod="openstack/openstack-galera-0" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.616221 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6e4d1f39-f0c4-4a19-a525-d0119d4b77e5-operator-scripts\") pod \"openstack-galera-0\" (UID: \"6e4d1f39-f0c4-4a19-a525-d0119d4b77e5\") " pod="openstack/openstack-galera-0" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.616283 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-648kg\" (UniqueName: \"kubernetes.io/projected/6e4d1f39-f0c4-4a19-a525-d0119d4b77e5-kube-api-access-648kg\") pod \"openstack-galera-0\" (UID: \"6e4d1f39-f0c4-4a19-a525-d0119d4b77e5\") " pod="openstack/openstack-galera-0" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.616328 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"6e4d1f39-f0c4-4a19-a525-d0119d4b77e5\") " pod="openstack/openstack-galera-0" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.616361 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e4d1f39-f0c4-4a19-a525-d0119d4b77e5-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"6e4d1f39-f0c4-4a19-a525-d0119d4b77e5\") " pod="openstack/openstack-galera-0" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.616383 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6e4d1f39-f0c4-4a19-a525-d0119d4b77e5-config-data-default\") pod \"openstack-galera-0\" (UID: \"6e4d1f39-f0c4-4a19-a525-d0119d4b77e5\") " pod="openstack/openstack-galera-0" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.616429 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6e4d1f39-f0c4-4a19-a525-d0119d4b77e5-config-data-generated\") pod \"openstack-galera-0\" (UID: \"6e4d1f39-f0c4-4a19-a525-d0119d4b77e5\") " pod="openstack/openstack-galera-0" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.616463 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6e4d1f39-f0c4-4a19-a525-d0119d4b77e5-kolla-config\") pod \"openstack-galera-0\" (UID: \"6e4d1f39-f0c4-4a19-a525-d0119d4b77e5\") " pod="openstack/openstack-galera-0" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.616486 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e4d1f39-f0c4-4a19-a525-d0119d4b77e5-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"6e4d1f39-f0c4-4a19-a525-d0119d4b77e5\") " pod="openstack/openstack-galera-0" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.616519 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/6e4d1f39-f0c4-4a19-a525-d0119d4b77e5-secrets\") pod \"openstack-galera-0\" (UID: \"6e4d1f39-f0c4-4a19-a525-d0119d4b77e5\") " pod="openstack/openstack-galera-0" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.617863 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6e4d1f39-f0c4-4a19-a525-d0119d4b77e5-config-data-generated\") pod \"openstack-galera-0\" (UID: \"6e4d1f39-f0c4-4a19-a525-d0119d4b77e5\") " pod="openstack/openstack-galera-0" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.618500 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6e4d1f39-f0c4-4a19-a525-d0119d4b77e5-config-data-default\") pod \"openstack-galera-0\" (UID: \"6e4d1f39-f0c4-4a19-a525-d0119d4b77e5\") " pod="openstack/openstack-galera-0" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.619217 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6e4d1f39-f0c4-4a19-a525-d0119d4b77e5-kolla-config\") pod \"openstack-galera-0\" (UID: \"6e4d1f39-f0c4-4a19-a525-d0119d4b77e5\") " pod="openstack/openstack-galera-0" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.619490 4651 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"6e4d1f39-f0c4-4a19-a525-d0119d4b77e5\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/openstack-galera-0" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.620466 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6e4d1f39-f0c4-4a19-a525-d0119d4b77e5-operator-scripts\") pod \"openstack-galera-0\" (UID: \"6e4d1f39-f0c4-4a19-a525-d0119d4b77e5\") " pod="openstack/openstack-galera-0" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.623474 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/6e4d1f39-f0c4-4a19-a525-d0119d4b77e5-secrets\") pod \"openstack-galera-0\" (UID: \"6e4d1f39-f0c4-4a19-a525-d0119d4b77e5\") " pod="openstack/openstack-galera-0" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.633080 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e4d1f39-f0c4-4a19-a525-d0119d4b77e5-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"6e4d1f39-f0c4-4a19-a525-d0119d4b77e5\") " pod="openstack/openstack-galera-0" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 
05:06:12.637385 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e4d1f39-f0c4-4a19-a525-d0119d4b77e5-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"6e4d1f39-f0c4-4a19-a525-d0119d4b77e5\") " pod="openstack/openstack-galera-0" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.644725 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-648kg\" (UniqueName: \"kubernetes.io/projected/6e4d1f39-f0c4-4a19-a525-d0119d4b77e5-kube-api-access-648kg\") pod \"openstack-galera-0\" (UID: \"6e4d1f39-f0c4-4a19-a525-d0119d4b77e5\") " pod="openstack/openstack-galera-0" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.654701 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"6e4d1f39-f0c4-4a19-a525-d0119d4b77e5\") " pod="openstack/openstack-galera-0" Oct 11 05:06:12 crc kubenswrapper[4651]: I1011 05:06:12.740350 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.691377 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.693456 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.694867 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-q2c42" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.695970 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.696326 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.696714 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.706680 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.731692 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c7fc16c5-4cac-4da2-82d1-226d056fe645-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"c7fc16c5-4cac-4da2-82d1-226d056fe645\") " pod="openstack/openstack-cell1-galera-0" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.731761 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"c7fc16c5-4cac-4da2-82d1-226d056fe645\") " pod="openstack/openstack-cell1-galera-0" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.731789 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c7fc16c5-4cac-4da2-82d1-226d056fe645-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: 
\"c7fc16c5-4cac-4da2-82d1-226d056fe645\") " pod="openstack/openstack-cell1-galera-0" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.731825 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c7fc16c5-4cac-4da2-82d1-226d056fe645-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"c7fc16c5-4cac-4da2-82d1-226d056fe645\") " pod="openstack/openstack-cell1-galera-0" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.731871 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgbzq\" (UniqueName: \"kubernetes.io/projected/c7fc16c5-4cac-4da2-82d1-226d056fe645-kube-api-access-bgbzq\") pod \"openstack-cell1-galera-0\" (UID: \"c7fc16c5-4cac-4da2-82d1-226d056fe645\") " pod="openstack/openstack-cell1-galera-0" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.731953 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c7fc16c5-4cac-4da2-82d1-226d056fe645-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"c7fc16c5-4cac-4da2-82d1-226d056fe645\") " pod="openstack/openstack-cell1-galera-0" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.732850 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7fc16c5-4cac-4da2-82d1-226d056fe645-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"c7fc16c5-4cac-4da2-82d1-226d056fe645\") " pod="openstack/openstack-cell1-galera-0" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.733092 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7fc16c5-4cac-4da2-82d1-226d056fe645-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"c7fc16c5-4cac-4da2-82d1-226d056fe645\") " pod="openstack/openstack-cell1-galera-0" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.733229 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/c7fc16c5-4cac-4da2-82d1-226d056fe645-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"c7fc16c5-4cac-4da2-82d1-226d056fe645\") " pod="openstack/openstack-cell1-galera-0" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.834921 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c7fc16c5-4cac-4da2-82d1-226d056fe645-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"c7fc16c5-4cac-4da2-82d1-226d056fe645\") " pod="openstack/openstack-cell1-galera-0" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.834982 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"c7fc16c5-4cac-4da2-82d1-226d056fe645\") " pod="openstack/openstack-cell1-galera-0" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.835004 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c7fc16c5-4cac-4da2-82d1-226d056fe645-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: 
\"c7fc16c5-4cac-4da2-82d1-226d056fe645\") " pod="openstack/openstack-cell1-galera-0" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.835028 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c7fc16c5-4cac-4da2-82d1-226d056fe645-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"c7fc16c5-4cac-4da2-82d1-226d056fe645\") " pod="openstack/openstack-cell1-galera-0" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.835047 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgbzq\" (UniqueName: \"kubernetes.io/projected/c7fc16c5-4cac-4da2-82d1-226d056fe645-kube-api-access-bgbzq\") pod \"openstack-cell1-galera-0\" (UID: \"c7fc16c5-4cac-4da2-82d1-226d056fe645\") " pod="openstack/openstack-cell1-galera-0" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.835064 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c7fc16c5-4cac-4da2-82d1-226d056fe645-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"c7fc16c5-4cac-4da2-82d1-226d056fe645\") " pod="openstack/openstack-cell1-galera-0" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.835079 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7fc16c5-4cac-4da2-82d1-226d056fe645-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"c7fc16c5-4cac-4da2-82d1-226d056fe645\") " pod="openstack/openstack-cell1-galera-0" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.835114 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7fc16c5-4cac-4da2-82d1-226d056fe645-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"c7fc16c5-4cac-4da2-82d1-226d056fe645\") " pod="openstack/openstack-cell1-galera-0" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.835139 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/c7fc16c5-4cac-4da2-82d1-226d056fe645-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"c7fc16c5-4cac-4da2-82d1-226d056fe645\") " pod="openstack/openstack-cell1-galera-0" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.835334 4651 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"c7fc16c5-4cac-4da2-82d1-226d056fe645\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/openstack-cell1-galera-0" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.835700 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c7fc16c5-4cac-4da2-82d1-226d056fe645-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"c7fc16c5-4cac-4da2-82d1-226d056fe645\") " pod="openstack/openstack-cell1-galera-0" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.835982 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c7fc16c5-4cac-4da2-82d1-226d056fe645-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"c7fc16c5-4cac-4da2-82d1-226d056fe645\") " pod="openstack/openstack-cell1-galera-0" Oct 11 
05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.836324 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c7fc16c5-4cac-4da2-82d1-226d056fe645-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"c7fc16c5-4cac-4da2-82d1-226d056fe645\") " pod="openstack/openstack-cell1-galera-0" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.837800 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c7fc16c5-4cac-4da2-82d1-226d056fe645-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"c7fc16c5-4cac-4da2-82d1-226d056fe645\") " pod="openstack/openstack-cell1-galera-0" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.840853 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7fc16c5-4cac-4da2-82d1-226d056fe645-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"c7fc16c5-4cac-4da2-82d1-226d056fe645\") " pod="openstack/openstack-cell1-galera-0" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.842383 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7fc16c5-4cac-4da2-82d1-226d056fe645-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"c7fc16c5-4cac-4da2-82d1-226d056fe645\") " pod="openstack/openstack-cell1-galera-0" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.855222 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/c7fc16c5-4cac-4da2-82d1-226d056fe645-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"c7fc16c5-4cac-4da2-82d1-226d056fe645\") " pod="openstack/openstack-cell1-galera-0" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.863663 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgbzq\" (UniqueName: \"kubernetes.io/projected/c7fc16c5-4cac-4da2-82d1-226d056fe645-kube-api-access-bgbzq\") pod \"openstack-cell1-galera-0\" (UID: \"c7fc16c5-4cac-4da2-82d1-226d056fe645\") " pod="openstack/openstack-cell1-galera-0" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.871868 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"c7fc16c5-4cac-4da2-82d1-226d056fe645\") " pod="openstack/openstack-cell1-galera-0" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.945723 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.946635 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.948876 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-7b2g5" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.949034 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.949906 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Oct 11 05:06:13 crc kubenswrapper[4651]: I1011 05:06:13.963537 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 11 05:06:14 crc kubenswrapper[4651]: I1011 05:06:14.020455 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 11 05:06:14 crc kubenswrapper[4651]: I1011 05:06:14.038098 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xj25h\" (UniqueName: \"kubernetes.io/projected/935c9395-17f6-4c8f-a08a-e3af25a75a9a-kube-api-access-xj25h\") pod \"memcached-0\" (UID: \"935c9395-17f6-4c8f-a08a-e3af25a75a9a\") " pod="openstack/memcached-0" Oct 11 05:06:14 crc kubenswrapper[4651]: I1011 05:06:14.038202 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/935c9395-17f6-4c8f-a08a-e3af25a75a9a-kolla-config\") pod \"memcached-0\" (UID: \"935c9395-17f6-4c8f-a08a-e3af25a75a9a\") " pod="openstack/memcached-0" Oct 11 05:06:14 crc kubenswrapper[4651]: I1011 05:06:14.038229 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/935c9395-17f6-4c8f-a08a-e3af25a75a9a-config-data\") pod \"memcached-0\" (UID: \"935c9395-17f6-4c8f-a08a-e3af25a75a9a\") " pod="openstack/memcached-0" Oct 11 05:06:14 crc kubenswrapper[4651]: I1011 05:06:14.038303 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/935c9395-17f6-4c8f-a08a-e3af25a75a9a-memcached-tls-certs\") pod \"memcached-0\" (UID: \"935c9395-17f6-4c8f-a08a-e3af25a75a9a\") " pod="openstack/memcached-0" Oct 11 05:06:14 crc kubenswrapper[4651]: I1011 05:06:14.038341 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/935c9395-17f6-4c8f-a08a-e3af25a75a9a-combined-ca-bundle\") pod \"memcached-0\" (UID: \"935c9395-17f6-4c8f-a08a-e3af25a75a9a\") " pod="openstack/memcached-0" Oct 11 05:06:14 crc kubenswrapper[4651]: I1011 05:06:14.141599 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/935c9395-17f6-4c8f-a08a-e3af25a75a9a-memcached-tls-certs\") pod \"memcached-0\" (UID: \"935c9395-17f6-4c8f-a08a-e3af25a75a9a\") " pod="openstack/memcached-0" Oct 11 05:06:14 crc kubenswrapper[4651]: I1011 05:06:14.141655 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/935c9395-17f6-4c8f-a08a-e3af25a75a9a-combined-ca-bundle\") pod \"memcached-0\" (UID: \"935c9395-17f6-4c8f-a08a-e3af25a75a9a\") " pod="openstack/memcached-0" Oct 11 05:06:14 crc kubenswrapper[4651]: I1011 
05:06:14.141739 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xj25h\" (UniqueName: \"kubernetes.io/projected/935c9395-17f6-4c8f-a08a-e3af25a75a9a-kube-api-access-xj25h\") pod \"memcached-0\" (UID: \"935c9395-17f6-4c8f-a08a-e3af25a75a9a\") " pod="openstack/memcached-0" Oct 11 05:06:14 crc kubenswrapper[4651]: I1011 05:06:14.141796 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/935c9395-17f6-4c8f-a08a-e3af25a75a9a-kolla-config\") pod \"memcached-0\" (UID: \"935c9395-17f6-4c8f-a08a-e3af25a75a9a\") " pod="openstack/memcached-0" Oct 11 05:06:14 crc kubenswrapper[4651]: I1011 05:06:14.141819 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/935c9395-17f6-4c8f-a08a-e3af25a75a9a-config-data\") pod \"memcached-0\" (UID: \"935c9395-17f6-4c8f-a08a-e3af25a75a9a\") " pod="openstack/memcached-0" Oct 11 05:06:14 crc kubenswrapper[4651]: I1011 05:06:14.143528 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/935c9395-17f6-4c8f-a08a-e3af25a75a9a-config-data\") pod \"memcached-0\" (UID: \"935c9395-17f6-4c8f-a08a-e3af25a75a9a\") " pod="openstack/memcached-0" Oct 11 05:06:14 crc kubenswrapper[4651]: I1011 05:06:14.144043 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/935c9395-17f6-4c8f-a08a-e3af25a75a9a-kolla-config\") pod \"memcached-0\" (UID: \"935c9395-17f6-4c8f-a08a-e3af25a75a9a\") " pod="openstack/memcached-0" Oct 11 05:06:14 crc kubenswrapper[4651]: I1011 05:06:14.156735 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/935c9395-17f6-4c8f-a08a-e3af25a75a9a-memcached-tls-certs\") pod \"memcached-0\" (UID: \"935c9395-17f6-4c8f-a08a-e3af25a75a9a\") " pod="openstack/memcached-0" Oct 11 05:06:14 crc kubenswrapper[4651]: I1011 05:06:14.156810 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/935c9395-17f6-4c8f-a08a-e3af25a75a9a-combined-ca-bundle\") pod \"memcached-0\" (UID: \"935c9395-17f6-4c8f-a08a-e3af25a75a9a\") " pod="openstack/memcached-0" Oct 11 05:06:14 crc kubenswrapper[4651]: I1011 05:06:14.169207 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xj25h\" (UniqueName: \"kubernetes.io/projected/935c9395-17f6-4c8f-a08a-e3af25a75a9a-kube-api-access-xj25h\") pod \"memcached-0\" (UID: \"935c9395-17f6-4c8f-a08a-e3af25a75a9a\") " pod="openstack/memcached-0" Oct 11 05:06:14 crc kubenswrapper[4651]: I1011 05:06:14.266817 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Oct 11 05:06:15 crc kubenswrapper[4651]: I1011 05:06:15.908112 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 11 05:06:15 crc kubenswrapper[4651]: I1011 05:06:15.909638 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 11 05:06:15 crc kubenswrapper[4651]: I1011 05:06:15.911541 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-bspfg" Oct 11 05:06:15 crc kubenswrapper[4651]: I1011 05:06:15.917675 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 11 05:06:15 crc kubenswrapper[4651]: I1011 05:06:15.967412 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8h7t\" (UniqueName: \"kubernetes.io/projected/24cb59c7-7c18-42ef-9e4e-a9cad024bf49-kube-api-access-h8h7t\") pod \"kube-state-metrics-0\" (UID: \"24cb59c7-7c18-42ef-9e4e-a9cad024bf49\") " pod="openstack/kube-state-metrics-0" Oct 11 05:06:16 crc kubenswrapper[4651]: I1011 05:06:16.070519 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8h7t\" (UniqueName: \"kubernetes.io/projected/24cb59c7-7c18-42ef-9e4e-a9cad024bf49-kube-api-access-h8h7t\") pod \"kube-state-metrics-0\" (UID: \"24cb59c7-7c18-42ef-9e4e-a9cad024bf49\") " pod="openstack/kube-state-metrics-0" Oct 11 05:06:16 crc kubenswrapper[4651]: I1011 05:06:16.089360 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8h7t\" (UniqueName: \"kubernetes.io/projected/24cb59c7-7c18-42ef-9e4e-a9cad024bf49-kube-api-access-h8h7t\") pod \"kube-state-metrics-0\" (UID: \"24cb59c7-7c18-42ef-9e4e-a9cad024bf49\") " pod="openstack/kube-state-metrics-0" Oct 11 05:06:16 crc kubenswrapper[4651]: I1011 05:06:16.229044 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 11 05:06:16 crc kubenswrapper[4651]: I1011 05:06:16.310727 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 05:06:16 crc kubenswrapper[4651]: I1011 05:06:16.310775 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.526903 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-4gdx9"] Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.527854 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-4gdx9" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.530030 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.530265 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.530471 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-xmdrb" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.532053 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-4gdx9"] Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.562933 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-2rjqb"] Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.564907 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-2rjqb" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.577551 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-2rjqb"] Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.628582 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/058302c4-d304-4a99-afbc-84a558968cfe-var-run-ovn\") pod \"ovn-controller-4gdx9\" (UID: \"058302c4-d304-4a99-afbc-84a558968cfe\") " pod="openstack/ovn-controller-4gdx9" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.628845 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hj9d6\" (UniqueName: \"kubernetes.io/projected/058302c4-d304-4a99-afbc-84a558968cfe-kube-api-access-hj9d6\") pod \"ovn-controller-4gdx9\" (UID: \"058302c4-d304-4a99-afbc-84a558968cfe\") " pod="openstack/ovn-controller-4gdx9" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.628874 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/058302c4-d304-4a99-afbc-84a558968cfe-var-log-ovn\") pod \"ovn-controller-4gdx9\" (UID: \"058302c4-d304-4a99-afbc-84a558968cfe\") " pod="openstack/ovn-controller-4gdx9" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.628891 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/058302c4-d304-4a99-afbc-84a558968cfe-scripts\") pod \"ovn-controller-4gdx9\" (UID: \"058302c4-d304-4a99-afbc-84a558968cfe\") " pod="openstack/ovn-controller-4gdx9" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.628931 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/15e07425-1bc8-43c4-ab2a-9daf9e9f95bb-etc-ovs\") pod \"ovn-controller-ovs-2rjqb\" (UID: \"15e07425-1bc8-43c4-ab2a-9daf9e9f95bb\") " pod="openstack/ovn-controller-ovs-2rjqb" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.628944 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/058302c4-d304-4a99-afbc-84a558968cfe-var-run\") pod \"ovn-controller-4gdx9\" (UID: \"058302c4-d304-4a99-afbc-84a558968cfe\") " 
pod="openstack/ovn-controller-4gdx9" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.628999 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/058302c4-d304-4a99-afbc-84a558968cfe-combined-ca-bundle\") pod \"ovn-controller-4gdx9\" (UID: \"058302c4-d304-4a99-afbc-84a558968cfe\") " pod="openstack/ovn-controller-4gdx9" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.629034 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/15e07425-1bc8-43c4-ab2a-9daf9e9f95bb-scripts\") pod \"ovn-controller-ovs-2rjqb\" (UID: \"15e07425-1bc8-43c4-ab2a-9daf9e9f95bb\") " pod="openstack/ovn-controller-ovs-2rjqb" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.629048 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/058302c4-d304-4a99-afbc-84a558968cfe-ovn-controller-tls-certs\") pod \"ovn-controller-4gdx9\" (UID: \"058302c4-d304-4a99-afbc-84a558968cfe\") " pod="openstack/ovn-controller-4gdx9" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.629066 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/15e07425-1bc8-43c4-ab2a-9daf9e9f95bb-var-run\") pod \"ovn-controller-ovs-2rjqb\" (UID: \"15e07425-1bc8-43c4-ab2a-9daf9e9f95bb\") " pod="openstack/ovn-controller-ovs-2rjqb" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.629080 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/15e07425-1bc8-43c4-ab2a-9daf9e9f95bb-var-log\") pod \"ovn-controller-ovs-2rjqb\" (UID: \"15e07425-1bc8-43c4-ab2a-9daf9e9f95bb\") " pod="openstack/ovn-controller-ovs-2rjqb" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.629098 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/15e07425-1bc8-43c4-ab2a-9daf9e9f95bb-var-lib\") pod \"ovn-controller-ovs-2rjqb\" (UID: \"15e07425-1bc8-43c4-ab2a-9daf9e9f95bb\") " pod="openstack/ovn-controller-ovs-2rjqb" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.629112 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrc8k\" (UniqueName: \"kubernetes.io/projected/15e07425-1bc8-43c4-ab2a-9daf9e9f95bb-kube-api-access-zrc8k\") pod \"ovn-controller-ovs-2rjqb\" (UID: \"15e07425-1bc8-43c4-ab2a-9daf9e9f95bb\") " pod="openstack/ovn-controller-ovs-2rjqb" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.729887 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/058302c4-d304-4a99-afbc-84a558968cfe-var-run\") pod \"ovn-controller-4gdx9\" (UID: \"058302c4-d304-4a99-afbc-84a558968cfe\") " pod="openstack/ovn-controller-4gdx9" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.730027 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/058302c4-d304-4a99-afbc-84a558968cfe-combined-ca-bundle\") pod \"ovn-controller-4gdx9\" (UID: \"058302c4-d304-4a99-afbc-84a558968cfe\") " pod="openstack/ovn-controller-4gdx9" Oct 11 05:06:19 
crc kubenswrapper[4651]: I1011 05:06:19.730588 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/058302c4-d304-4a99-afbc-84a558968cfe-var-run\") pod \"ovn-controller-4gdx9\" (UID: \"058302c4-d304-4a99-afbc-84a558968cfe\") " pod="openstack/ovn-controller-4gdx9" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.730884 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/15e07425-1bc8-43c4-ab2a-9daf9e9f95bb-scripts\") pod \"ovn-controller-ovs-2rjqb\" (UID: \"15e07425-1bc8-43c4-ab2a-9daf9e9f95bb\") " pod="openstack/ovn-controller-ovs-2rjqb" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.731046 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/058302c4-d304-4a99-afbc-84a558968cfe-ovn-controller-tls-certs\") pod \"ovn-controller-4gdx9\" (UID: \"058302c4-d304-4a99-afbc-84a558968cfe\") " pod="openstack/ovn-controller-4gdx9" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.731089 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/15e07425-1bc8-43c4-ab2a-9daf9e9f95bb-var-run\") pod \"ovn-controller-ovs-2rjqb\" (UID: \"15e07425-1bc8-43c4-ab2a-9daf9e9f95bb\") " pod="openstack/ovn-controller-ovs-2rjqb" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.731114 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/15e07425-1bc8-43c4-ab2a-9daf9e9f95bb-var-log\") pod \"ovn-controller-ovs-2rjqb\" (UID: \"15e07425-1bc8-43c4-ab2a-9daf9e9f95bb\") " pod="openstack/ovn-controller-ovs-2rjqb" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.731138 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/15e07425-1bc8-43c4-ab2a-9daf9e9f95bb-var-lib\") pod \"ovn-controller-ovs-2rjqb\" (UID: \"15e07425-1bc8-43c4-ab2a-9daf9e9f95bb\") " pod="openstack/ovn-controller-ovs-2rjqb" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.731160 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrc8k\" (UniqueName: \"kubernetes.io/projected/15e07425-1bc8-43c4-ab2a-9daf9e9f95bb-kube-api-access-zrc8k\") pod \"ovn-controller-ovs-2rjqb\" (UID: \"15e07425-1bc8-43c4-ab2a-9daf9e9f95bb\") " pod="openstack/ovn-controller-ovs-2rjqb" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.731189 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/058302c4-d304-4a99-afbc-84a558968cfe-var-run-ovn\") pod \"ovn-controller-4gdx9\" (UID: \"058302c4-d304-4a99-afbc-84a558968cfe\") " pod="openstack/ovn-controller-4gdx9" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.731207 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hj9d6\" (UniqueName: \"kubernetes.io/projected/058302c4-d304-4a99-afbc-84a558968cfe-kube-api-access-hj9d6\") pod \"ovn-controller-4gdx9\" (UID: \"058302c4-d304-4a99-afbc-84a558968cfe\") " pod="openstack/ovn-controller-4gdx9" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.731234 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: 
\"kubernetes.io/host-path/058302c4-d304-4a99-afbc-84a558968cfe-var-log-ovn\") pod \"ovn-controller-4gdx9\" (UID: \"058302c4-d304-4a99-afbc-84a558968cfe\") " pod="openstack/ovn-controller-4gdx9" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.731256 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/058302c4-d304-4a99-afbc-84a558968cfe-scripts\") pod \"ovn-controller-4gdx9\" (UID: \"058302c4-d304-4a99-afbc-84a558968cfe\") " pod="openstack/ovn-controller-4gdx9" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.731305 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/15e07425-1bc8-43c4-ab2a-9daf9e9f95bb-etc-ovs\") pod \"ovn-controller-ovs-2rjqb\" (UID: \"15e07425-1bc8-43c4-ab2a-9daf9e9f95bb\") " pod="openstack/ovn-controller-ovs-2rjqb" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.731509 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/15e07425-1bc8-43c4-ab2a-9daf9e9f95bb-etc-ovs\") pod \"ovn-controller-ovs-2rjqb\" (UID: \"15e07425-1bc8-43c4-ab2a-9daf9e9f95bb\") " pod="openstack/ovn-controller-ovs-2rjqb" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.731562 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/15e07425-1bc8-43c4-ab2a-9daf9e9f95bb-var-run\") pod \"ovn-controller-ovs-2rjqb\" (UID: \"15e07425-1bc8-43c4-ab2a-9daf9e9f95bb\") " pod="openstack/ovn-controller-ovs-2rjqb" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.731678 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/15e07425-1bc8-43c4-ab2a-9daf9e9f95bb-var-log\") pod \"ovn-controller-ovs-2rjqb\" (UID: \"15e07425-1bc8-43c4-ab2a-9daf9e9f95bb\") " pod="openstack/ovn-controller-ovs-2rjqb" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.731696 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/058302c4-d304-4a99-afbc-84a558968cfe-var-run-ovn\") pod \"ovn-controller-4gdx9\" (UID: \"058302c4-d304-4a99-afbc-84a558968cfe\") " pod="openstack/ovn-controller-4gdx9" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.731709 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/058302c4-d304-4a99-afbc-84a558968cfe-var-log-ovn\") pod \"ovn-controller-4gdx9\" (UID: \"058302c4-d304-4a99-afbc-84a558968cfe\") " pod="openstack/ovn-controller-4gdx9" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.732100 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/15e07425-1bc8-43c4-ab2a-9daf9e9f95bb-var-lib\") pod \"ovn-controller-ovs-2rjqb\" (UID: \"15e07425-1bc8-43c4-ab2a-9daf9e9f95bb\") " pod="openstack/ovn-controller-ovs-2rjqb" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.733444 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/058302c4-d304-4a99-afbc-84a558968cfe-scripts\") pod \"ovn-controller-4gdx9\" (UID: \"058302c4-d304-4a99-afbc-84a558968cfe\") " pod="openstack/ovn-controller-4gdx9" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.734587 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/058302c4-d304-4a99-afbc-84a558968cfe-combined-ca-bundle\") pod \"ovn-controller-4gdx9\" (UID: \"058302c4-d304-4a99-afbc-84a558968cfe\") " pod="openstack/ovn-controller-4gdx9" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.736346 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/15e07425-1bc8-43c4-ab2a-9daf9e9f95bb-scripts\") pod \"ovn-controller-ovs-2rjqb\" (UID: \"15e07425-1bc8-43c4-ab2a-9daf9e9f95bb\") " pod="openstack/ovn-controller-ovs-2rjqb" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.748286 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/058302c4-d304-4a99-afbc-84a558968cfe-ovn-controller-tls-certs\") pod \"ovn-controller-4gdx9\" (UID: \"058302c4-d304-4a99-afbc-84a558968cfe\") " pod="openstack/ovn-controller-4gdx9" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.749081 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hj9d6\" (UniqueName: \"kubernetes.io/projected/058302c4-d304-4a99-afbc-84a558968cfe-kube-api-access-hj9d6\") pod \"ovn-controller-4gdx9\" (UID: \"058302c4-d304-4a99-afbc-84a558968cfe\") " pod="openstack/ovn-controller-4gdx9" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.751321 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrc8k\" (UniqueName: \"kubernetes.io/projected/15e07425-1bc8-43c4-ab2a-9daf9e9f95bb-kube-api-access-zrc8k\") pod \"ovn-controller-ovs-2rjqb\" (UID: \"15e07425-1bc8-43c4-ab2a-9daf9e9f95bb\") " pod="openstack/ovn-controller-ovs-2rjqb" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.855722 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-xmdrb" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.864429 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4gdx9" Oct 11 05:06:19 crc kubenswrapper[4651]: I1011 05:06:19.885815 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-2rjqb" Oct 11 05:06:20 crc kubenswrapper[4651]: I1011 05:06:20.018957 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 11 05:06:20 crc kubenswrapper[4651]: I1011 05:06:20.020449 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 11 05:06:20 crc kubenswrapper[4651]: I1011 05:06:20.022221 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Oct 11 05:06:20 crc kubenswrapper[4651]: I1011 05:06:20.022536 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Oct 11 05:06:20 crc kubenswrapper[4651]: I1011 05:06:20.022647 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Oct 11 05:06:20 crc kubenswrapper[4651]: I1011 05:06:20.026501 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Oct 11 05:06:20 crc kubenswrapper[4651]: I1011 05:06:20.028360 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-2g9j7" Oct 11 05:06:20 crc kubenswrapper[4651]: I1011 05:06:20.041866 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 11 05:06:20 crc kubenswrapper[4651]: I1011 05:06:20.139612 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/66e1ea71-4579-48c7-b0c9-8074d1a6f821-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"66e1ea71-4579-48c7-b0c9-8074d1a6f821\") " pod="openstack/ovsdbserver-nb-0" Oct 11 05:06:20 crc kubenswrapper[4651]: I1011 05:06:20.139660 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/66e1ea71-4579-48c7-b0c9-8074d1a6f821-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"66e1ea71-4579-48c7-b0c9-8074d1a6f821\") " pod="openstack/ovsdbserver-nb-0" Oct 11 05:06:20 crc kubenswrapper[4651]: I1011 05:06:20.139690 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhqh8\" (UniqueName: \"kubernetes.io/projected/66e1ea71-4579-48c7-b0c9-8074d1a6f821-kube-api-access-bhqh8\") pod \"ovsdbserver-nb-0\" (UID: \"66e1ea71-4579-48c7-b0c9-8074d1a6f821\") " pod="openstack/ovsdbserver-nb-0" Oct 11 05:06:20 crc kubenswrapper[4651]: I1011 05:06:20.139742 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-nb-0\" (UID: \"66e1ea71-4579-48c7-b0c9-8074d1a6f821\") " pod="openstack/ovsdbserver-nb-0" Oct 11 05:06:20 crc kubenswrapper[4651]: I1011 05:06:20.139902 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66e1ea71-4579-48c7-b0c9-8074d1a6f821-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"66e1ea71-4579-48c7-b0c9-8074d1a6f821\") " pod="openstack/ovsdbserver-nb-0" Oct 11 05:06:20 crc kubenswrapper[4651]: I1011 05:06:20.139964 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66e1ea71-4579-48c7-b0c9-8074d1a6f821-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"66e1ea71-4579-48c7-b0c9-8074d1a6f821\") " pod="openstack/ovsdbserver-nb-0" Oct 11 05:06:20 crc kubenswrapper[4651]: I1011 05:06:20.140007 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/66e1ea71-4579-48c7-b0c9-8074d1a6f821-config\") pod \"ovsdbserver-nb-0\" (UID: \"66e1ea71-4579-48c7-b0c9-8074d1a6f821\") " pod="openstack/ovsdbserver-nb-0" Oct 11 05:06:20 crc kubenswrapper[4651]: I1011 05:06:20.140131 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/66e1ea71-4579-48c7-b0c9-8074d1a6f821-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"66e1ea71-4579-48c7-b0c9-8074d1a6f821\") " pod="openstack/ovsdbserver-nb-0" Oct 11 05:06:20 crc kubenswrapper[4651]: I1011 05:06:20.241295 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66e1ea71-4579-48c7-b0c9-8074d1a6f821-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"66e1ea71-4579-48c7-b0c9-8074d1a6f821\") " pod="openstack/ovsdbserver-nb-0" Oct 11 05:06:20 crc kubenswrapper[4651]: I1011 05:06:20.241354 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66e1ea71-4579-48c7-b0c9-8074d1a6f821-config\") pod \"ovsdbserver-nb-0\" (UID: \"66e1ea71-4579-48c7-b0c9-8074d1a6f821\") " pod="openstack/ovsdbserver-nb-0" Oct 11 05:06:20 crc kubenswrapper[4651]: I1011 05:06:20.241430 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/66e1ea71-4579-48c7-b0c9-8074d1a6f821-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"66e1ea71-4579-48c7-b0c9-8074d1a6f821\") " pod="openstack/ovsdbserver-nb-0" Oct 11 05:06:20 crc kubenswrapper[4651]: I1011 05:06:20.241482 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/66e1ea71-4579-48c7-b0c9-8074d1a6f821-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"66e1ea71-4579-48c7-b0c9-8074d1a6f821\") " pod="openstack/ovsdbserver-nb-0" Oct 11 05:06:20 crc kubenswrapper[4651]: I1011 05:06:20.241508 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/66e1ea71-4579-48c7-b0c9-8074d1a6f821-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"66e1ea71-4579-48c7-b0c9-8074d1a6f821\") " pod="openstack/ovsdbserver-nb-0" Oct 11 05:06:20 crc kubenswrapper[4651]: I1011 05:06:20.241534 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhqh8\" (UniqueName: \"kubernetes.io/projected/66e1ea71-4579-48c7-b0c9-8074d1a6f821-kube-api-access-bhqh8\") pod \"ovsdbserver-nb-0\" (UID: \"66e1ea71-4579-48c7-b0c9-8074d1a6f821\") " pod="openstack/ovsdbserver-nb-0" Oct 11 05:06:20 crc kubenswrapper[4651]: I1011 05:06:20.241558 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-nb-0\" (UID: \"66e1ea71-4579-48c7-b0c9-8074d1a6f821\") " pod="openstack/ovsdbserver-nb-0" Oct 11 05:06:20 crc kubenswrapper[4651]: I1011 05:06:20.241591 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66e1ea71-4579-48c7-b0c9-8074d1a6f821-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"66e1ea71-4579-48c7-b0c9-8074d1a6f821\") " pod="openstack/ovsdbserver-nb-0" Oct 11 05:06:20 crc kubenswrapper[4651]: I1011 
05:06:20.242308 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/66e1ea71-4579-48c7-b0c9-8074d1a6f821-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"66e1ea71-4579-48c7-b0c9-8074d1a6f821\") " pod="openstack/ovsdbserver-nb-0" Oct 11 05:06:20 crc kubenswrapper[4651]: I1011 05:06:20.242528 4651 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-nb-0\" (UID: \"66e1ea71-4579-48c7-b0c9-8074d1a6f821\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/ovsdbserver-nb-0" Oct 11 05:06:20 crc kubenswrapper[4651]: I1011 05:06:20.244288 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66e1ea71-4579-48c7-b0c9-8074d1a6f821-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"66e1ea71-4579-48c7-b0c9-8074d1a6f821\") " pod="openstack/ovsdbserver-nb-0" Oct 11 05:06:20 crc kubenswrapper[4651]: I1011 05:06:20.247012 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/66e1ea71-4579-48c7-b0c9-8074d1a6f821-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"66e1ea71-4579-48c7-b0c9-8074d1a6f821\") " pod="openstack/ovsdbserver-nb-0" Oct 11 05:06:20 crc kubenswrapper[4651]: I1011 05:06:20.247274 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66e1ea71-4579-48c7-b0c9-8074d1a6f821-config\") pod \"ovsdbserver-nb-0\" (UID: \"66e1ea71-4579-48c7-b0c9-8074d1a6f821\") " pod="openstack/ovsdbserver-nb-0" Oct 11 05:06:20 crc kubenswrapper[4651]: I1011 05:06:20.255554 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/66e1ea71-4579-48c7-b0c9-8074d1a6f821-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"66e1ea71-4579-48c7-b0c9-8074d1a6f821\") " pod="openstack/ovsdbserver-nb-0" Oct 11 05:06:20 crc kubenswrapper[4651]: I1011 05:06:20.262636 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66e1ea71-4579-48c7-b0c9-8074d1a6f821-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"66e1ea71-4579-48c7-b0c9-8074d1a6f821\") " pod="openstack/ovsdbserver-nb-0" Oct 11 05:06:20 crc kubenswrapper[4651]: I1011 05:06:20.263677 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhqh8\" (UniqueName: \"kubernetes.io/projected/66e1ea71-4579-48c7-b0c9-8074d1a6f821-kube-api-access-bhqh8\") pod \"ovsdbserver-nb-0\" (UID: \"66e1ea71-4579-48c7-b0c9-8074d1a6f821\") " pod="openstack/ovsdbserver-nb-0" Oct 11 05:06:20 crc kubenswrapper[4651]: I1011 05:06:20.277900 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-nb-0\" (UID: \"66e1ea71-4579-48c7-b0c9-8074d1a6f821\") " pod="openstack/ovsdbserver-nb-0" Oct 11 05:06:20 crc kubenswrapper[4651]: I1011 05:06:20.340932 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 11 05:06:23 crc kubenswrapper[4651]: E1011 05:06:23.477902 4651 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 11 05:06:23 crc kubenswrapper[4651]: E1011 05:06:23.478306 4651 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rlkct,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-plnsm_openstack(83b871e5-d44b-4c41-88ab-f8f04b76d16f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 11 05:06:23 crc kubenswrapper[4651]: E1011 05:06:23.479515 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-plnsm" podUID="83b871e5-d44b-4c41-88ab-f8f04b76d16f" Oct 11 05:06:23 crc kubenswrapper[4651]: E1011 05:06:23.499273 4651 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 11 05:06:23 crc kubenswrapper[4651]: E1011 05:06:23.499405 4651 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qh7vw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-9v2w4_openstack(742f177f-d880-4e28-a826-e2e565ebef2e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 11 05:06:23 crc kubenswrapper[4651]: E1011 05:06:23.500706 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-9v2w4" podUID="742f177f-d880-4e28-a826-e2e565ebef2e" Oct 11 05:06:23 crc kubenswrapper[4651]: I1011 05:06:23.704177 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 11 05:06:23 crc kubenswrapper[4651]: I1011 05:06:23.708298 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 11 05:06:23 crc kubenswrapper[4651]: I1011 05:06:23.710721 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-q25xc" Oct 11 05:06:23 crc kubenswrapper[4651]: I1011 05:06:23.711075 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Oct 11 05:06:23 crc kubenswrapper[4651]: I1011 05:06:23.711281 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 11 05:06:23 crc kubenswrapper[4651]: I1011 05:06:23.711293 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Oct 11 05:06:23 crc kubenswrapper[4651]: I1011 05:06:23.711330 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Oct 11 05:06:23 crc kubenswrapper[4651]: I1011 05:06:23.794347 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ddbe514-235a-4191-9f6b-3d785b0b4d21-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"0ddbe514-235a-4191-9f6b-3d785b0b4d21\") " pod="openstack/ovsdbserver-sb-0" Oct 11 05:06:23 crc kubenswrapper[4651]: I1011 05:06:23.794407 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0ddbe514-235a-4191-9f6b-3d785b0b4d21-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"0ddbe514-235a-4191-9f6b-3d785b0b4d21\") " pod="openstack/ovsdbserver-sb-0" Oct 11 05:06:23 crc kubenswrapper[4651]: I1011 05:06:23.794430 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ddbe514-235a-4191-9f6b-3d785b0b4d21-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"0ddbe514-235a-4191-9f6b-3d785b0b4d21\") " pod="openstack/ovsdbserver-sb-0" Oct 11 05:06:23 crc kubenswrapper[4651]: I1011 05:06:23.794455 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0ddbe514-235a-4191-9f6b-3d785b0b4d21-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"0ddbe514-235a-4191-9f6b-3d785b0b4d21\") " pod="openstack/ovsdbserver-sb-0" Oct 11 05:06:23 crc kubenswrapper[4651]: I1011 05:06:23.794477 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ddbe514-235a-4191-9f6b-3d785b0b4d21-config\") pod \"ovsdbserver-sb-0\" (UID: \"0ddbe514-235a-4191-9f6b-3d785b0b4d21\") " pod="openstack/ovsdbserver-sb-0" Oct 11 05:06:23 crc kubenswrapper[4651]: I1011 05:06:23.794508 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ddbe514-235a-4191-9f6b-3d785b0b4d21-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"0ddbe514-235a-4191-9f6b-3d785b0b4d21\") " pod="openstack/ovsdbserver-sb-0" Oct 11 05:06:23 crc kubenswrapper[4651]: I1011 05:06:23.794560 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-sb-0\" (UID: \"0ddbe514-235a-4191-9f6b-3d785b0b4d21\") " 
pod="openstack/ovsdbserver-sb-0" Oct 11 05:06:23 crc kubenswrapper[4651]: I1011 05:06:23.794582 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pm8s6\" (UniqueName: \"kubernetes.io/projected/0ddbe514-235a-4191-9f6b-3d785b0b4d21-kube-api-access-pm8s6\") pod \"ovsdbserver-sb-0\" (UID: \"0ddbe514-235a-4191-9f6b-3d785b0b4d21\") " pod="openstack/ovsdbserver-sb-0" Oct 11 05:06:23 crc kubenswrapper[4651]: I1011 05:06:23.896078 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ddbe514-235a-4191-9f6b-3d785b0b4d21-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"0ddbe514-235a-4191-9f6b-3d785b0b4d21\") " pod="openstack/ovsdbserver-sb-0" Oct 11 05:06:23 crc kubenswrapper[4651]: I1011 05:06:23.896170 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0ddbe514-235a-4191-9f6b-3d785b0b4d21-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"0ddbe514-235a-4191-9f6b-3d785b0b4d21\") " pod="openstack/ovsdbserver-sb-0" Oct 11 05:06:23 crc kubenswrapper[4651]: I1011 05:06:23.896186 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ddbe514-235a-4191-9f6b-3d785b0b4d21-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"0ddbe514-235a-4191-9f6b-3d785b0b4d21\") " pod="openstack/ovsdbserver-sb-0" Oct 11 05:06:23 crc kubenswrapper[4651]: I1011 05:06:23.896209 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0ddbe514-235a-4191-9f6b-3d785b0b4d21-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"0ddbe514-235a-4191-9f6b-3d785b0b4d21\") " pod="openstack/ovsdbserver-sb-0" Oct 11 05:06:23 crc kubenswrapper[4651]: I1011 05:06:23.896232 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ddbe514-235a-4191-9f6b-3d785b0b4d21-config\") pod \"ovsdbserver-sb-0\" (UID: \"0ddbe514-235a-4191-9f6b-3d785b0b4d21\") " pod="openstack/ovsdbserver-sb-0" Oct 11 05:06:23 crc kubenswrapper[4651]: I1011 05:06:23.896270 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ddbe514-235a-4191-9f6b-3d785b0b4d21-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"0ddbe514-235a-4191-9f6b-3d785b0b4d21\") " pod="openstack/ovsdbserver-sb-0" Oct 11 05:06:23 crc kubenswrapper[4651]: I1011 05:06:23.896335 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-sb-0\" (UID: \"0ddbe514-235a-4191-9f6b-3d785b0b4d21\") " pod="openstack/ovsdbserver-sb-0" Oct 11 05:06:23 crc kubenswrapper[4651]: I1011 05:06:23.896370 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pm8s6\" (UniqueName: \"kubernetes.io/projected/0ddbe514-235a-4191-9f6b-3d785b0b4d21-kube-api-access-pm8s6\") pod \"ovsdbserver-sb-0\" (UID: \"0ddbe514-235a-4191-9f6b-3d785b0b4d21\") " pod="openstack/ovsdbserver-sb-0" Oct 11 05:06:23 crc kubenswrapper[4651]: I1011 05:06:23.898204 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/0ddbe514-235a-4191-9f6b-3d785b0b4d21-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"0ddbe514-235a-4191-9f6b-3d785b0b4d21\") " pod="openstack/ovsdbserver-sb-0" Oct 11 05:06:23 crc kubenswrapper[4651]: I1011 05:06:23.899909 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ddbe514-235a-4191-9f6b-3d785b0b4d21-config\") pod \"ovsdbserver-sb-0\" (UID: \"0ddbe514-235a-4191-9f6b-3d785b0b4d21\") " pod="openstack/ovsdbserver-sb-0" Oct 11 05:06:23 crc kubenswrapper[4651]: I1011 05:06:23.900084 4651 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-sb-0\" (UID: \"0ddbe514-235a-4191-9f6b-3d785b0b4d21\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/ovsdbserver-sb-0" Oct 11 05:06:23 crc kubenswrapper[4651]: I1011 05:06:23.900569 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0ddbe514-235a-4191-9f6b-3d785b0b4d21-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"0ddbe514-235a-4191-9f6b-3d785b0b4d21\") " pod="openstack/ovsdbserver-sb-0" Oct 11 05:06:23 crc kubenswrapper[4651]: I1011 05:06:23.904775 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ddbe514-235a-4191-9f6b-3d785b0b4d21-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"0ddbe514-235a-4191-9f6b-3d785b0b4d21\") " pod="openstack/ovsdbserver-sb-0" Oct 11 05:06:23 crc kubenswrapper[4651]: I1011 05:06:23.908877 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ddbe514-235a-4191-9f6b-3d785b0b4d21-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"0ddbe514-235a-4191-9f6b-3d785b0b4d21\") " pod="openstack/ovsdbserver-sb-0" Oct 11 05:06:23 crc kubenswrapper[4651]: I1011 05:06:23.912071 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ddbe514-235a-4191-9f6b-3d785b0b4d21-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"0ddbe514-235a-4191-9f6b-3d785b0b4d21\") " pod="openstack/ovsdbserver-sb-0" Oct 11 05:06:23 crc kubenswrapper[4651]: I1011 05:06:23.916445 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pm8s6\" (UniqueName: \"kubernetes.io/projected/0ddbe514-235a-4191-9f6b-3d785b0b4d21-kube-api-access-pm8s6\") pod \"ovsdbserver-sb-0\" (UID: \"0ddbe514-235a-4191-9f6b-3d785b0b4d21\") " pod="openstack/ovsdbserver-sb-0" Oct 11 05:06:23 crc kubenswrapper[4651]: I1011 05:06:23.928196 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-sb-0\" (UID: \"0ddbe514-235a-4191-9f6b-3d785b0b4d21\") " pod="openstack/ovsdbserver-sb-0" Oct 11 05:06:24 crc kubenswrapper[4651]: I1011 05:06:24.064851 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 11 05:06:24 crc kubenswrapper[4651]: W1011 05:06:24.066171 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddbfeee44_d2ad_4a4b_814f_916176925aaf.slice/crio-eec70c36cad1d1c991e6ef4a4a83710cdc68f8a0b043be49b608c2cd7d765c0c WatchSource:0}: 
Error finding container eec70c36cad1d1c991e6ef4a4a83710cdc68f8a0b043be49b608c2cd7d765c0c: Status 404 returned error can't find the container with id eec70c36cad1d1c991e6ef4a4a83710cdc68f8a0b043be49b608c2cd7d765c0c Oct 11 05:06:24 crc kubenswrapper[4651]: I1011 05:06:24.068162 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 11 05:06:24 crc kubenswrapper[4651]: I1011 05:06:24.132257 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"dbfeee44-d2ad-4a4b-814f-916176925aaf","Type":"ContainerStarted","Data":"eec70c36cad1d1c991e6ef4a4a83710cdc68f8a0b043be49b608c2cd7d765c0c"} Oct 11 05:06:24 crc kubenswrapper[4651]: I1011 05:06:24.224626 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 11 05:06:24 crc kubenswrapper[4651]: I1011 05:06:24.232784 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-zm69b"] Oct 11 05:06:24 crc kubenswrapper[4651]: I1011 05:06:24.238040 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-89rdc"] Oct 11 05:06:24 crc kubenswrapper[4651]: I1011 05:06:24.242882 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 11 05:06:24 crc kubenswrapper[4651]: I1011 05:06:24.251181 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 11 05:06:24 crc kubenswrapper[4651]: W1011 05:06:24.253298 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7855690d_2913_4d7b_81de_f34a5f299431.slice/crio-ac5c5a3b09fc313f650d4c4df87f4fe9c71dcb9de759befbdf134680f10696a1 WatchSource:0}: Error finding container ac5c5a3b09fc313f650d4c4df87f4fe9c71dcb9de759befbdf134680f10696a1: Status 404 returned error can't find the container with id ac5c5a3b09fc313f650d4c4df87f4fe9c71dcb9de759befbdf134680f10696a1 Oct 11 05:06:24 crc kubenswrapper[4651]: W1011 05:06:24.253599 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc7fc16c5_4cac_4da2_82d1_226d056fe645.slice/crio-05d11fcf59306f043f052b7ea7d2bef153017ad65ff00a07fb5311ce6a453a6b WatchSource:0}: Error finding container 05d11fcf59306f043f052b7ea7d2bef153017ad65ff00a07fb5311ce6a453a6b: Status 404 returned error can't find the container with id 05d11fcf59306f043f052b7ea7d2bef153017ad65ff00a07fb5311ce6a453a6b Oct 11 05:06:24 crc kubenswrapper[4651]: W1011 05:06:24.255707 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podca84ab98_19fd_4cbf_ab28_acb4fced285f.slice/crio-a432887df2c9254a282706c7fa850b5ecd59398e60baf51fd3a84d8a8551f0bf WatchSource:0}: Error finding container a432887df2c9254a282706c7fa850b5ecd59398e60baf51fd3a84d8a8551f0bf: Status 404 returned error can't find the container with id a432887df2c9254a282706c7fa850b5ecd59398e60baf51fd3a84d8a8551f0bf Oct 11 05:06:24 crc kubenswrapper[4651]: W1011 05:06:24.257709 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6e4d1f39_f0c4_4a19_a525_d0119d4b77e5.slice/crio-2a607c69194fe52bc89927f241133914d8377993f2dc22ebb274af66e37a5c33 WatchSource:0}: Error finding container 2a607c69194fe52bc89927f241133914d8377993f2dc22ebb274af66e37a5c33: Status 404 returned error 
can't find the container with id 2a607c69194fe52bc89927f241133914d8377993f2dc22ebb274af66e37a5c33 Oct 11 05:06:24 crc kubenswrapper[4651]: W1011 05:06:24.259299 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod935c9395_17f6_4c8f_a08a_e3af25a75a9a.slice/crio-8e7b52c9e4deaea1c3acab06bc819303ec6e51409bdea8d27a11434a78f2aca2 WatchSource:0}: Error finding container 8e7b52c9e4deaea1c3acab06bc819303ec6e51409bdea8d27a11434a78f2aca2: Status 404 returned error can't find the container with id 8e7b52c9e4deaea1c3acab06bc819303ec6e51409bdea8d27a11434a78f2aca2 Oct 11 05:06:24 crc kubenswrapper[4651]: I1011 05:06:24.518629 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-9v2w4" Oct 11 05:06:24 crc kubenswrapper[4651]: I1011 05:06:24.529038 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-plnsm" Oct 11 05:06:24 crc kubenswrapper[4651]: I1011 05:06:24.606922 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/742f177f-d880-4e28-a826-e2e565ebef2e-config\") pod \"742f177f-d880-4e28-a826-e2e565ebef2e\" (UID: \"742f177f-d880-4e28-a826-e2e565ebef2e\") " Oct 11 05:06:24 crc kubenswrapper[4651]: I1011 05:06:24.606969 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/83b871e5-d44b-4c41-88ab-f8f04b76d16f-dns-svc\") pod \"83b871e5-d44b-4c41-88ab-f8f04b76d16f\" (UID: \"83b871e5-d44b-4c41-88ab-f8f04b76d16f\") " Oct 11 05:06:24 crc kubenswrapper[4651]: I1011 05:06:24.607057 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/83b871e5-d44b-4c41-88ab-f8f04b76d16f-config\") pod \"83b871e5-d44b-4c41-88ab-f8f04b76d16f\" (UID: \"83b871e5-d44b-4c41-88ab-f8f04b76d16f\") " Oct 11 05:06:24 crc kubenswrapper[4651]: I1011 05:06:24.607177 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qh7vw\" (UniqueName: \"kubernetes.io/projected/742f177f-d880-4e28-a826-e2e565ebef2e-kube-api-access-qh7vw\") pod \"742f177f-d880-4e28-a826-e2e565ebef2e\" (UID: \"742f177f-d880-4e28-a826-e2e565ebef2e\") " Oct 11 05:06:24 crc kubenswrapper[4651]: I1011 05:06:24.607238 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rlkct\" (UniqueName: \"kubernetes.io/projected/83b871e5-d44b-4c41-88ab-f8f04b76d16f-kube-api-access-rlkct\") pod \"83b871e5-d44b-4c41-88ab-f8f04b76d16f\" (UID: \"83b871e5-d44b-4c41-88ab-f8f04b76d16f\") " Oct 11 05:06:24 crc kubenswrapper[4651]: I1011 05:06:24.607618 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/83b871e5-d44b-4c41-88ab-f8f04b76d16f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "83b871e5-d44b-4c41-88ab-f8f04b76d16f" (UID: "83b871e5-d44b-4c41-88ab-f8f04b76d16f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:06:24 crc kubenswrapper[4651]: I1011 05:06:24.607710 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/83b871e5-d44b-4c41-88ab-f8f04b76d16f-config" (OuterVolumeSpecName: "config") pod "83b871e5-d44b-4c41-88ab-f8f04b76d16f" (UID: "83b871e5-d44b-4c41-88ab-f8f04b76d16f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:06:24 crc kubenswrapper[4651]: I1011 05:06:24.607919 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/742f177f-d880-4e28-a826-e2e565ebef2e-config" (OuterVolumeSpecName: "config") pod "742f177f-d880-4e28-a826-e2e565ebef2e" (UID: "742f177f-d880-4e28-a826-e2e565ebef2e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:06:24 crc kubenswrapper[4651]: I1011 05:06:24.611931 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/742f177f-d880-4e28-a826-e2e565ebef2e-kube-api-access-qh7vw" (OuterVolumeSpecName: "kube-api-access-qh7vw") pod "742f177f-d880-4e28-a826-e2e565ebef2e" (UID: "742f177f-d880-4e28-a826-e2e565ebef2e"). InnerVolumeSpecName "kube-api-access-qh7vw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:06:24 crc kubenswrapper[4651]: I1011 05:06:24.612002 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83b871e5-d44b-4c41-88ab-f8f04b76d16f-kube-api-access-rlkct" (OuterVolumeSpecName: "kube-api-access-rlkct") pod "83b871e5-d44b-4c41-88ab-f8f04b76d16f" (UID: "83b871e5-d44b-4c41-88ab-f8f04b76d16f"). InnerVolumeSpecName "kube-api-access-rlkct". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:06:24 crc kubenswrapper[4651]: I1011 05:06:24.668101 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 11 05:06:24 crc kubenswrapper[4651]: I1011 05:06:24.673550 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-4gdx9"] Oct 11 05:06:24 crc kubenswrapper[4651]: I1011 05:06:24.690985 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 11 05:06:24 crc kubenswrapper[4651]: I1011 05:06:24.734545 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/742f177f-d880-4e28-a826-e2e565ebef2e-config\") on node \"crc\" DevicePath \"\"" Oct 11 05:06:24 crc kubenswrapper[4651]: I1011 05:06:24.734575 4651 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/83b871e5-d44b-4c41-88ab-f8f04b76d16f-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 11 05:06:24 crc kubenswrapper[4651]: I1011 05:06:24.734593 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/83b871e5-d44b-4c41-88ab-f8f04b76d16f-config\") on node \"crc\" DevicePath \"\"" Oct 11 05:06:24 crc kubenswrapper[4651]: I1011 05:06:24.734611 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qh7vw\" (UniqueName: \"kubernetes.io/projected/742f177f-d880-4e28-a826-e2e565ebef2e-kube-api-access-qh7vw\") on node \"crc\" DevicePath \"\"" Oct 11 05:06:24 crc kubenswrapper[4651]: I1011 05:06:24.735972 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rlkct\" (UniqueName: \"kubernetes.io/projected/83b871e5-d44b-4c41-88ab-f8f04b76d16f-kube-api-access-rlkct\") on node \"crc\" DevicePath \"\"" Oct 11 05:06:24 crc kubenswrapper[4651]: I1011 05:06:24.810230 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 11 05:06:24 crc kubenswrapper[4651]: W1011 05:06:24.815437 4651 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0ddbe514_235a_4191_9f6b_3d785b0b4d21.slice/crio-c4c358ba963ca2c7bf8da9804873f607122f188117ac67ff0002705487dc8261 WatchSource:0}: Error finding container c4c358ba963ca2c7bf8da9804873f607122f188117ac67ff0002705487dc8261: Status 404 returned error can't find the container with id c4c358ba963ca2c7bf8da9804873f607122f188117ac67ff0002705487dc8261 Oct 11 05:06:24 crc kubenswrapper[4651]: I1011 05:06:24.919939 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-2rjqb"] Oct 11 05:06:25 crc kubenswrapper[4651]: I1011 05:06:25.140100 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"935c9395-17f6-4c8f-a08a-e3af25a75a9a","Type":"ContainerStarted","Data":"8e7b52c9e4deaea1c3acab06bc819303ec6e51409bdea8d27a11434a78f2aca2"} Oct 11 05:06:25 crc kubenswrapper[4651]: I1011 05:06:25.141544 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"24cb59c7-7c18-42ef-9e4e-a9cad024bf49","Type":"ContainerStarted","Data":"699b0cee8260d752b55acff090e30914e841d78d619df3b21035971f1d55fe12"} Oct 11 05:06:25 crc kubenswrapper[4651]: I1011 05:06:25.143773 4651 generic.go:334] "Generic (PLEG): container finished" podID="ca84ab98-19fd-4cbf-ab28-acb4fced285f" containerID="2834835156f8ea76962c6759020fe737f7f6a025b380526fc167680cb68c1799" exitCode=0 Oct 11 05:06:25 crc kubenswrapper[4651]: I1011 05:06:25.143811 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-89rdc" event={"ID":"ca84ab98-19fd-4cbf-ab28-acb4fced285f","Type":"ContainerDied","Data":"2834835156f8ea76962c6759020fe737f7f6a025b380526fc167680cb68c1799"} Oct 11 05:06:25 crc kubenswrapper[4651]: I1011 05:06:25.143845 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-89rdc" event={"ID":"ca84ab98-19fd-4cbf-ab28-acb4fced285f","Type":"ContainerStarted","Data":"a432887df2c9254a282706c7fa850b5ecd59398e60baf51fd3a84d8a8551f0bf"} Oct 11 05:06:25 crc kubenswrapper[4651]: I1011 05:06:25.146146 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-plnsm" event={"ID":"83b871e5-d44b-4c41-88ab-f8f04b76d16f","Type":"ContainerDied","Data":"c6cdce3beecff0bc1390173e93f846c11a1756d10f3e3d52899ce9b12f3dfa8f"} Oct 11 05:06:25 crc kubenswrapper[4651]: I1011 05:06:25.146233 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-plnsm" Oct 11 05:06:25 crc kubenswrapper[4651]: I1011 05:06:25.151237 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4gdx9" event={"ID":"058302c4-d304-4a99-afbc-84a558968cfe","Type":"ContainerStarted","Data":"8498f74c00627be5dd543b4af1d8b30438badea103394dd37d85be558617e94e"} Oct 11 05:06:25 crc kubenswrapper[4651]: I1011 05:06:25.152350 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-2rjqb" event={"ID":"15e07425-1bc8-43c4-ab2a-9daf9e9f95bb","Type":"ContainerStarted","Data":"d48762782a1ec7ea66d19219ba688fd2d26ebe7a82890f0f85a580ebbe905339"} Oct 11 05:06:25 crc kubenswrapper[4651]: I1011 05:06:25.154157 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"e73b125b-a52b-44bb-bbed-3a484f53a9cb","Type":"ContainerStarted","Data":"d48c4a545849869757ea0ab6851826f2a45b302ab86e6bb3fe9e35d0282e8add"} Oct 11 05:06:25 crc kubenswrapper[4651]: I1011 05:06:25.156030 4651 generic.go:334] "Generic (PLEG): container finished" podID="7855690d-2913-4d7b-81de-f34a5f299431" containerID="51e56dbbe03e8c911a573e199febbcbc2709fa5ff88b16e9d23a41d1c262dc7d" exitCode=0 Oct 11 05:06:25 crc kubenswrapper[4651]: I1011 05:06:25.156081 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-zm69b" event={"ID":"7855690d-2913-4d7b-81de-f34a5f299431","Type":"ContainerDied","Data":"51e56dbbe03e8c911a573e199febbcbc2709fa5ff88b16e9d23a41d1c262dc7d"} Oct 11 05:06:25 crc kubenswrapper[4651]: I1011 05:06:25.156110 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-zm69b" event={"ID":"7855690d-2913-4d7b-81de-f34a5f299431","Type":"ContainerStarted","Data":"ac5c5a3b09fc313f650d4c4df87f4fe9c71dcb9de759befbdf134680f10696a1"} Oct 11 05:06:25 crc kubenswrapper[4651]: I1011 05:06:25.161906 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"0ddbe514-235a-4191-9f6b-3d785b0b4d21","Type":"ContainerStarted","Data":"c4c358ba963ca2c7bf8da9804873f607122f188117ac67ff0002705487dc8261"} Oct 11 05:06:25 crc kubenswrapper[4651]: I1011 05:06:25.168421 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"6e4d1f39-f0c4-4a19-a525-d0119d4b77e5","Type":"ContainerStarted","Data":"2a607c69194fe52bc89927f241133914d8377993f2dc22ebb274af66e37a5c33"} Oct 11 05:06:25 crc kubenswrapper[4651]: I1011 05:06:25.172686 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"c7fc16c5-4cac-4da2-82d1-226d056fe645","Type":"ContainerStarted","Data":"05d11fcf59306f043f052b7ea7d2bef153017ad65ff00a07fb5311ce6a453a6b"} Oct 11 05:06:25 crc kubenswrapper[4651]: I1011 05:06:25.174557 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-9v2w4" event={"ID":"742f177f-d880-4e28-a826-e2e565ebef2e","Type":"ContainerDied","Data":"eb73564d2b2af5e48c1246ceba6213701e2330f5175756502730409f0b10e809"} Oct 11 05:06:25 crc kubenswrapper[4651]: I1011 05:06:25.174622 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-9v2w4" Oct 11 05:06:25 crc kubenswrapper[4651]: I1011 05:06:25.254936 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-plnsm"] Oct 11 05:06:25 crc kubenswrapper[4651]: I1011 05:06:25.267912 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-plnsm"] Oct 11 05:06:25 crc kubenswrapper[4651]: I1011 05:06:25.282888 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-9v2w4"] Oct 11 05:06:25 crc kubenswrapper[4651]: I1011 05:06:25.289735 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-9v2w4"] Oct 11 05:06:25 crc kubenswrapper[4651]: I1011 05:06:25.344580 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 11 05:06:25 crc kubenswrapper[4651]: W1011 05:06:25.352360 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod66e1ea71_4579_48c7_b0c9_8074d1a6f821.slice/crio-ff7ff07a6a27a5772b6cd1219ec442482104ac12cd562727c36650211d1cec3c WatchSource:0}: Error finding container ff7ff07a6a27a5772b6cd1219ec442482104ac12cd562727c36650211d1cec3c: Status 404 returned error can't find the container with id ff7ff07a6a27a5772b6cd1219ec442482104ac12cd562727c36650211d1cec3c Oct 11 05:06:25 crc kubenswrapper[4651]: E1011 05:06:25.389049 4651 log.go:32] "CreateContainer in sandbox from runtime service failed" err=< Oct 11 05:06:25 crc kubenswrapper[4651]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/7855690d-2913-4d7b-81de-f34a5f299431/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Oct 11 05:06:25 crc kubenswrapper[4651]: > podSandboxID="ac5c5a3b09fc313f650d4c4df87f4fe9c71dcb9de759befbdf134680f10696a1" Oct 11 05:06:25 crc kubenswrapper[4651]: E1011 05:06:25.389216 4651 kuberuntime_manager.go:1274] "Unhandled Error" err=< Oct 11 05:06:25 crc kubenswrapper[4651]: container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv 
--log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6fdq5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-zm69b_openstack(7855690d-2913-4d7b-81de-f34a5f299431): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/7855690d-2913-4d7b-81de-f34a5f299431/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Oct 11 05:06:25 crc kubenswrapper[4651]: > logger="UnhandledError" Oct 11 05:06:25 crc kubenswrapper[4651]: E1011 05:06:25.390852 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/7855690d-2913-4d7b-81de-f34a5f299431/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-666b6646f7-zm69b" podUID="7855690d-2913-4d7b-81de-f34a5f299431" Oct 11 05:06:25 crc kubenswrapper[4651]: I1011 05:06:25.896765 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="742f177f-d880-4e28-a826-e2e565ebef2e" path="/var/lib/kubelet/pods/742f177f-d880-4e28-a826-e2e565ebef2e/volumes" Oct 11 05:06:25 crc kubenswrapper[4651]: I1011 05:06:25.897201 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83b871e5-d44b-4c41-88ab-f8f04b76d16f" path="/var/lib/kubelet/pods/83b871e5-d44b-4c41-88ab-f8f04b76d16f/volumes" Oct 11 
05:06:26 crc kubenswrapper[4651]: I1011 05:06:26.186227 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-89rdc" event={"ID":"ca84ab98-19fd-4cbf-ab28-acb4fced285f","Type":"ContainerStarted","Data":"f938731219facf4908fc2a256b9eb9885190a59cfe0e357682dfd2ad6d729076"} Oct 11 05:06:26 crc kubenswrapper[4651]: I1011 05:06:26.186409 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-89rdc" Oct 11 05:06:26 crc kubenswrapper[4651]: I1011 05:06:26.188599 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"66e1ea71-4579-48c7-b0c9-8074d1a6f821","Type":"ContainerStarted","Data":"ff7ff07a6a27a5772b6cd1219ec442482104ac12cd562727c36650211d1cec3c"} Oct 11 05:06:26 crc kubenswrapper[4651]: I1011 05:06:26.205469 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-89rdc" podStartSLOduration=16.77727099 podStartE2EDuration="17.205448086s" podCreationTimestamp="2025-10-11 05:06:09 +0000 UTC" firstStartedPulling="2025-10-11 05:06:24.257942478 +0000 UTC m=+905.154175274" lastFinishedPulling="2025-10-11 05:06:24.686119564 +0000 UTC m=+905.582352370" observedRunningTime="2025-10-11 05:06:26.2017007 +0000 UTC m=+907.097933516" watchObservedRunningTime="2025-10-11 05:06:26.205448086 +0000 UTC m=+907.101680882" Oct 11 05:06:30 crc kubenswrapper[4651]: I1011 05:06:30.080066 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-57d769cc4f-89rdc" Oct 11 05:06:30 crc kubenswrapper[4651]: I1011 05:06:30.136879 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-zm69b"] Oct 11 05:06:33 crc kubenswrapper[4651]: I1011 05:06:33.244049 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"c7fc16c5-4cac-4da2-82d1-226d056fe645","Type":"ContainerStarted","Data":"265aa848bb3b4b33a712e5a71f1cdcd1be7e61235b12bd8cbaddefbed85b78d3"} Oct 11 05:06:33 crc kubenswrapper[4651]: I1011 05:06:33.247051 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-zm69b" event={"ID":"7855690d-2913-4d7b-81de-f34a5f299431","Type":"ContainerStarted","Data":"d890ed43d4e35ccc37fe4a95ee122e00cf0f36168e78b5fc27abaac122a85dd1"} Oct 11 05:06:33 crc kubenswrapper[4651]: I1011 05:06:33.247372 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-666b6646f7-zm69b" podUID="7855690d-2913-4d7b-81de-f34a5f299431" containerName="dnsmasq-dns" containerID="cri-o://d890ed43d4e35ccc37fe4a95ee122e00cf0f36168e78b5fc27abaac122a85dd1" gracePeriod=10 Oct 11 05:06:33 crc kubenswrapper[4651]: I1011 05:06:33.247584 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-666b6646f7-zm69b" Oct 11 05:06:33 crc kubenswrapper[4651]: I1011 05:06:33.249701 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"935c9395-17f6-4c8f-a08a-e3af25a75a9a","Type":"ContainerStarted","Data":"73924f7f95f9d2496f1fb1fc5112a08d04b7b7112234abe1adbcbf5ff363b3f3"} Oct 11 05:06:33 crc kubenswrapper[4651]: I1011 05:06:33.250005 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Oct 11 05:06:33 crc kubenswrapper[4651]: I1011 05:06:33.253587 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" 
event={"ID":"66e1ea71-4579-48c7-b0c9-8074d1a6f821","Type":"ContainerStarted","Data":"9194375a77c7d8b230509c1f1fc98b6b0bd2aeeaca659d07c8b67e54e7fd673c"} Oct 11 05:06:33 crc kubenswrapper[4651]: I1011 05:06:33.256543 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4gdx9" event={"ID":"058302c4-d304-4a99-afbc-84a558968cfe","Type":"ContainerStarted","Data":"b99e4252ac1a6f64c8821ee154952caff4a65394da0d5997b4737eda370b5038"} Oct 11 05:06:33 crc kubenswrapper[4651]: I1011 05:06:33.257054 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-4gdx9" Oct 11 05:06:33 crc kubenswrapper[4651]: I1011 05:06:33.259007 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"6e4d1f39-f0c4-4a19-a525-d0119d4b77e5","Type":"ContainerStarted","Data":"433b27e9bd4e98003c46a226a83869b4c6ebe0239c2ee0ffff741a76a94a74e0"} Oct 11 05:06:33 crc kubenswrapper[4651]: I1011 05:06:33.260898 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-2rjqb" event={"ID":"15e07425-1bc8-43c4-ab2a-9daf9e9f95bb","Type":"ContainerStarted","Data":"a7c24f574c53f9156f360cb9f6240f4ada264987db6275eabb0b0c7d9e142d94"} Oct 11 05:06:33 crc kubenswrapper[4651]: I1011 05:06:33.319064 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=12.90423974 podStartE2EDuration="20.319036869s" podCreationTimestamp="2025-10-11 05:06:13 +0000 UTC" firstStartedPulling="2025-10-11 05:06:24.261616592 +0000 UTC m=+905.157849388" lastFinishedPulling="2025-10-11 05:06:31.676413711 +0000 UTC m=+912.572646517" observedRunningTime="2025-10-11 05:06:33.312711868 +0000 UTC m=+914.208944694" watchObservedRunningTime="2025-10-11 05:06:33.319036869 +0000 UTC m=+914.215269685" Oct 11 05:06:33 crc kubenswrapper[4651]: I1011 05:06:33.321864 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-666b6646f7-zm69b" podStartSLOduration=23.892378923 podStartE2EDuration="24.321851261s" podCreationTimestamp="2025-10-11 05:06:09 +0000 UTC" firstStartedPulling="2025-10-11 05:06:24.258186615 +0000 UTC m=+905.154419411" lastFinishedPulling="2025-10-11 05:06:24.687658943 +0000 UTC m=+905.583891749" observedRunningTime="2025-10-11 05:06:33.297403399 +0000 UTC m=+914.193636255" watchObservedRunningTime="2025-10-11 05:06:33.321851261 +0000 UTC m=+914.218084067" Oct 11 05:06:33 crc kubenswrapper[4651]: I1011 05:06:33.336023 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-4gdx9" podStartSLOduration=7.010404262 podStartE2EDuration="14.336005241s" podCreationTimestamp="2025-10-11 05:06:19 +0000 UTC" firstStartedPulling="2025-10-11 05:06:24.678471009 +0000 UTC m=+905.574703815" lastFinishedPulling="2025-10-11 05:06:32.004071958 +0000 UTC m=+912.900304794" observedRunningTime="2025-10-11 05:06:33.331979719 +0000 UTC m=+914.228212525" watchObservedRunningTime="2025-10-11 05:06:33.336005241 +0000 UTC m=+914.232238047" Oct 11 05:06:33 crc kubenswrapper[4651]: I1011 05:06:33.878908 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-zm69b" Oct 11 05:06:33 crc kubenswrapper[4651]: I1011 05:06:33.994457 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7855690d-2913-4d7b-81de-f34a5f299431-config\") pod \"7855690d-2913-4d7b-81de-f34a5f299431\" (UID: \"7855690d-2913-4d7b-81de-f34a5f299431\") " Oct 11 05:06:33 crc kubenswrapper[4651]: I1011 05:06:33.994523 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7855690d-2913-4d7b-81de-f34a5f299431-dns-svc\") pod \"7855690d-2913-4d7b-81de-f34a5f299431\" (UID: \"7855690d-2913-4d7b-81de-f34a5f299431\") " Oct 11 05:06:33 crc kubenswrapper[4651]: I1011 05:06:33.994640 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6fdq5\" (UniqueName: \"kubernetes.io/projected/7855690d-2913-4d7b-81de-f34a5f299431-kube-api-access-6fdq5\") pod \"7855690d-2913-4d7b-81de-f34a5f299431\" (UID: \"7855690d-2913-4d7b-81de-f34a5f299431\") " Oct 11 05:06:34 crc kubenswrapper[4651]: I1011 05:06:34.001023 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7855690d-2913-4d7b-81de-f34a5f299431-kube-api-access-6fdq5" (OuterVolumeSpecName: "kube-api-access-6fdq5") pod "7855690d-2913-4d7b-81de-f34a5f299431" (UID: "7855690d-2913-4d7b-81de-f34a5f299431"). InnerVolumeSpecName "kube-api-access-6fdq5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:06:34 crc kubenswrapper[4651]: I1011 05:06:34.036523 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7855690d-2913-4d7b-81de-f34a5f299431-config" (OuterVolumeSpecName: "config") pod "7855690d-2913-4d7b-81de-f34a5f299431" (UID: "7855690d-2913-4d7b-81de-f34a5f299431"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:06:34 crc kubenswrapper[4651]: I1011 05:06:34.048211 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7855690d-2913-4d7b-81de-f34a5f299431-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7855690d-2913-4d7b-81de-f34a5f299431" (UID: "7855690d-2913-4d7b-81de-f34a5f299431"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:06:34 crc kubenswrapper[4651]: I1011 05:06:34.096800 4651 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7855690d-2913-4d7b-81de-f34a5f299431-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 11 05:06:34 crc kubenswrapper[4651]: I1011 05:06:34.096841 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6fdq5\" (UniqueName: \"kubernetes.io/projected/7855690d-2913-4d7b-81de-f34a5f299431-kube-api-access-6fdq5\") on node \"crc\" DevicePath \"\"" Oct 11 05:06:34 crc kubenswrapper[4651]: I1011 05:06:34.096852 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7855690d-2913-4d7b-81de-f34a5f299431-config\") on node \"crc\" DevicePath \"\"" Oct 11 05:06:34 crc kubenswrapper[4651]: I1011 05:06:34.292766 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"e73b125b-a52b-44bb-bbed-3a484f53a9cb","Type":"ContainerStarted","Data":"d0718945f619181b6b1f3000cd1eaed2003512a29de9e8f1b43985057d9b10d7"} Oct 11 05:06:34 crc kubenswrapper[4651]: I1011 05:06:34.305603 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"0ddbe514-235a-4191-9f6b-3d785b0b4d21","Type":"ContainerStarted","Data":"f5a5f6dfb934be086078fecfb7e08606ac0aa2671a64c6842aee2b76f8568435"} Oct 11 05:06:34 crc kubenswrapper[4651]: I1011 05:06:34.309960 4651 generic.go:334] "Generic (PLEG): container finished" podID="7855690d-2913-4d7b-81de-f34a5f299431" containerID="d890ed43d4e35ccc37fe4a95ee122e00cf0f36168e78b5fc27abaac122a85dd1" exitCode=0 Oct 11 05:06:34 crc kubenswrapper[4651]: I1011 05:06:34.310042 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-zm69b" event={"ID":"7855690d-2913-4d7b-81de-f34a5f299431","Type":"ContainerDied","Data":"d890ed43d4e35ccc37fe4a95ee122e00cf0f36168e78b5fc27abaac122a85dd1"} Oct 11 05:06:34 crc kubenswrapper[4651]: I1011 05:06:34.310075 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-zm69b" event={"ID":"7855690d-2913-4d7b-81de-f34a5f299431","Type":"ContainerDied","Data":"ac5c5a3b09fc313f650d4c4df87f4fe9c71dcb9de759befbdf134680f10696a1"} Oct 11 05:06:34 crc kubenswrapper[4651]: I1011 05:06:34.310092 4651 scope.go:117] "RemoveContainer" containerID="d890ed43d4e35ccc37fe4a95ee122e00cf0f36168e78b5fc27abaac122a85dd1" Oct 11 05:06:34 crc kubenswrapper[4651]: I1011 05:06:34.310200 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-zm69b" Oct 11 05:06:34 crc kubenswrapper[4651]: I1011 05:06:34.312407 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"24cb59c7-7c18-42ef-9e4e-a9cad024bf49","Type":"ContainerStarted","Data":"0f70749cf13488323f896ca1de8b2914c159624773deed1b202bfe91c06a1ab8"} Oct 11 05:06:34 crc kubenswrapper[4651]: I1011 05:06:34.312541 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 11 05:06:34 crc kubenswrapper[4651]: I1011 05:06:34.313870 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"dbfeee44-d2ad-4a4b-814f-916176925aaf","Type":"ContainerStarted","Data":"c50a4b72004db94f4ca1044699f14bb5386093354b3b69c9543ac53d702d890e"} Oct 11 05:06:34 crc kubenswrapper[4651]: I1011 05:06:34.318198 4651 generic.go:334] "Generic (PLEG): container finished" podID="15e07425-1bc8-43c4-ab2a-9daf9e9f95bb" containerID="a7c24f574c53f9156f360cb9f6240f4ada264987db6275eabb0b0c7d9e142d94" exitCode=0 Oct 11 05:06:34 crc kubenswrapper[4651]: I1011 05:06:34.318705 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-2rjqb" event={"ID":"15e07425-1bc8-43c4-ab2a-9daf9e9f95bb","Type":"ContainerDied","Data":"a7c24f574c53f9156f360cb9f6240f4ada264987db6275eabb0b0c7d9e142d94"} Oct 11 05:06:34 crc kubenswrapper[4651]: I1011 05:06:34.338524 4651 scope.go:117] "RemoveContainer" containerID="51e56dbbe03e8c911a573e199febbcbc2709fa5ff88b16e9d23a41d1c262dc7d" Oct 11 05:06:34 crc kubenswrapper[4651]: I1011 05:06:34.341631 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=11.545478026 podStartE2EDuration="19.341608929s" podCreationTimestamp="2025-10-11 05:06:15 +0000 UTC" firstStartedPulling="2025-10-11 05:06:24.779738606 +0000 UTC m=+905.675971402" lastFinishedPulling="2025-10-11 05:06:32.575869509 +0000 UTC m=+913.472102305" observedRunningTime="2025-10-11 05:06:34.338982602 +0000 UTC m=+915.235215398" watchObservedRunningTime="2025-10-11 05:06:34.341608929 +0000 UTC m=+915.237841725" Oct 11 05:06:34 crc kubenswrapper[4651]: I1011 05:06:34.387095 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-zm69b"] Oct 11 05:06:34 crc kubenswrapper[4651]: I1011 05:06:34.393491 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-zm69b"] Oct 11 05:06:34 crc kubenswrapper[4651]: I1011 05:06:34.404286 4651 scope.go:117] "RemoveContainer" containerID="d890ed43d4e35ccc37fe4a95ee122e00cf0f36168e78b5fc27abaac122a85dd1" Oct 11 05:06:34 crc kubenswrapper[4651]: E1011 05:06:34.405239 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d890ed43d4e35ccc37fe4a95ee122e00cf0f36168e78b5fc27abaac122a85dd1\": container with ID starting with d890ed43d4e35ccc37fe4a95ee122e00cf0f36168e78b5fc27abaac122a85dd1 not found: ID does not exist" containerID="d890ed43d4e35ccc37fe4a95ee122e00cf0f36168e78b5fc27abaac122a85dd1" Oct 11 05:06:34 crc kubenswrapper[4651]: I1011 05:06:34.405280 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d890ed43d4e35ccc37fe4a95ee122e00cf0f36168e78b5fc27abaac122a85dd1"} err="failed to get container status \"d890ed43d4e35ccc37fe4a95ee122e00cf0f36168e78b5fc27abaac122a85dd1\": rpc error: code = NotFound 
desc = could not find container \"d890ed43d4e35ccc37fe4a95ee122e00cf0f36168e78b5fc27abaac122a85dd1\": container with ID starting with d890ed43d4e35ccc37fe4a95ee122e00cf0f36168e78b5fc27abaac122a85dd1 not found: ID does not exist"
Oct 11 05:06:34 crc kubenswrapper[4651]: I1011 05:06:34.405309 4651 scope.go:117] "RemoveContainer" containerID="51e56dbbe03e8c911a573e199febbcbc2709fa5ff88b16e9d23a41d1c262dc7d"
Oct 11 05:06:34 crc kubenswrapper[4651]: E1011 05:06:34.412138 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51e56dbbe03e8c911a573e199febbcbc2709fa5ff88b16e9d23a41d1c262dc7d\": container with ID starting with 51e56dbbe03e8c911a573e199febbcbc2709fa5ff88b16e9d23a41d1c262dc7d not found: ID does not exist" containerID="51e56dbbe03e8c911a573e199febbcbc2709fa5ff88b16e9d23a41d1c262dc7d"
Oct 11 05:06:34 crc kubenswrapper[4651]: I1011 05:06:34.412360 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51e56dbbe03e8c911a573e199febbcbc2709fa5ff88b16e9d23a41d1c262dc7d"} err="failed to get container status \"51e56dbbe03e8c911a573e199febbcbc2709fa5ff88b16e9d23a41d1c262dc7d\": rpc error: code = NotFound desc = could not find container \"51e56dbbe03e8c911a573e199febbcbc2709fa5ff88b16e9d23a41d1c262dc7d\": container with ID starting with 51e56dbbe03e8c911a573e199febbcbc2709fa5ff88b16e9d23a41d1c262dc7d not found: ID does not exist"
Oct 11 05:06:35 crc kubenswrapper[4651]: I1011 05:06:35.326147 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-2rjqb" event={"ID":"15e07425-1bc8-43c4-ab2a-9daf9e9f95bb","Type":"ContainerStarted","Data":"f87a0f00f57d9814ac2dfa89669ef7b416e9361161301661495ea1dafa257cf9"}
Oct 11 05:06:35 crc kubenswrapper[4651]: I1011 05:06:35.327174 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-2rjqb" event={"ID":"15e07425-1bc8-43c4-ab2a-9daf9e9f95bb","Type":"ContainerStarted","Data":"feed424f0a342d8b7bde709b31c647f9d71a203910a8f4f5fffb42056558f570"}
Oct 11 05:06:35 crc kubenswrapper[4651]: I1011 05:06:35.327294 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-2rjqb"
Oct 11 05:06:35 crc kubenswrapper[4651]: I1011 05:06:35.327370 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-2rjqb"
Oct 11 05:06:35 crc kubenswrapper[4651]: I1011 05:06:35.351655 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-2rjqb" podStartSLOduration=9.355975898 podStartE2EDuration="16.351638191s" podCreationTimestamp="2025-10-11 05:06:19 +0000 UTC" firstStartedPulling="2025-10-11 05:06:24.931379135 +0000 UTC m=+905.827611941" lastFinishedPulling="2025-10-11 05:06:31.927041428 +0000 UTC m=+912.823274234" observedRunningTime="2025-10-11 05:06:35.348995174 +0000 UTC m=+916.245227980" watchObservedRunningTime="2025-10-11 05:06:35.351638191 +0000 UTC m=+916.247870987"
Oct 11 05:06:35 crc kubenswrapper[4651]: I1011 05:06:35.883057 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7855690d-2913-4d7b-81de-f34a5f299431" path="/var/lib/kubelet/pods/7855690d-2913-4d7b-81de-f34a5f299431/volumes"
Oct 11 05:06:37 crc kubenswrapper[4651]: I1011 05:06:37.357548 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"66e1ea71-4579-48c7-b0c9-8074d1a6f821","Type":"ContainerStarted","Data":"3f82e9aef9f436153371ebbdd6e651024ed8e2b6ae5ade24920ed25f0d85255f"}
Oct 11 05:06:37 crc kubenswrapper[4651]: I1011 05:06:37.358961 4651 generic.go:334] "Generic (PLEG): container finished" podID="6e4d1f39-f0c4-4a19-a525-d0119d4b77e5" containerID="433b27e9bd4e98003c46a226a83869b4c6ebe0239c2ee0ffff741a76a94a74e0" exitCode=0
Oct 11 05:06:37 crc kubenswrapper[4651]: I1011 05:06:37.359025 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"6e4d1f39-f0c4-4a19-a525-d0119d4b77e5","Type":"ContainerDied","Data":"433b27e9bd4e98003c46a226a83869b4c6ebe0239c2ee0ffff741a76a94a74e0"}
Oct 11 05:06:37 crc kubenswrapper[4651]: I1011 05:06:37.362065 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"0ddbe514-235a-4191-9f6b-3d785b0b4d21","Type":"ContainerStarted","Data":"faecaba229e3f04bdcb7692884b32cb45b613ab99c54ee3807d4a6d43f9d3a56"}
Oct 11 05:06:37 crc kubenswrapper[4651]: I1011 05:06:37.364589 4651 generic.go:334] "Generic (PLEG): container finished" podID="c7fc16c5-4cac-4da2-82d1-226d056fe645" containerID="265aa848bb3b4b33a712e5a71f1cdcd1be7e61235b12bd8cbaddefbed85b78d3" exitCode=0
Oct 11 05:06:37 crc kubenswrapper[4651]: I1011 05:06:37.364647 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"c7fc16c5-4cac-4da2-82d1-226d056fe645","Type":"ContainerDied","Data":"265aa848bb3b4b33a712e5a71f1cdcd1be7e61235b12bd8cbaddefbed85b78d3"}
Oct 11 05:06:37 crc kubenswrapper[4651]: I1011 05:06:37.394270 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=8.151943632 podStartE2EDuration="19.394247617s" podCreationTimestamp="2025-10-11 05:06:18 +0000 UTC" firstStartedPulling="2025-10-11 05:06:25.355063296 +0000 UTC m=+906.251296082" lastFinishedPulling="2025-10-11 05:06:36.597367271 +0000 UTC m=+917.493600067" observedRunningTime="2025-10-11 05:06:37.386599183 +0000 UTC m=+918.282831999" watchObservedRunningTime="2025-10-11 05:06:37.394247617 +0000 UTC m=+918.290480423"
Oct 11 05:06:37 crc kubenswrapper[4651]: I1011 05:06:37.431753 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=3.647134997 podStartE2EDuration="15.431736981s" podCreationTimestamp="2025-10-11 05:06:22 +0000 UTC" firstStartedPulling="2025-10-11 05:06:24.818283377 +0000 UTC m=+905.714516173" lastFinishedPulling="2025-10-11 05:06:36.602885341 +0000 UTC m=+917.499118157" observedRunningTime="2025-10-11 05:06:37.431427423 +0000 UTC m=+918.327660269" watchObservedRunningTime="2025-10-11 05:06:37.431736981 +0000 UTC m=+918.327969777"
Oct 11 05:06:38 crc kubenswrapper[4651]: I1011 05:06:38.341234 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0"
Oct 11 05:06:38 crc kubenswrapper[4651]: I1011 05:06:38.379405 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"6e4d1f39-f0c4-4a19-a525-d0119d4b77e5","Type":"ContainerStarted","Data":"0cc2521937904b961684237c0b992d80ea33df411d5a8991329d897c495d4516"}
Oct 11 05:06:38 crc kubenswrapper[4651]: I1011 05:06:38.383366 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"c7fc16c5-4cac-4da2-82d1-226d056fe645","Type":"ContainerStarted","Data":"5eeedcee222dfc42db2ec60f2d11d1e736d1f53d13fc77014df13eadce98c44b"}
Oct 11 05:06:38 crc kubenswrapper[4651]: I1011 05:06:38.405210 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0"
Oct 11 05:06:38 crc kubenswrapper[4651]: I1011 05:06:38.412296 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=19.940815502 podStartE2EDuration="27.412274203s" podCreationTimestamp="2025-10-11 05:06:11 +0000 UTC" firstStartedPulling="2025-10-11 05:06:24.260533464 +0000 UTC m=+905.156766260" lastFinishedPulling="2025-10-11 05:06:31.731992165 +0000 UTC m=+912.628224961" observedRunningTime="2025-10-11 05:06:38.407931082 +0000 UTC m=+919.304163898" watchObservedRunningTime="2025-10-11 05:06:38.412274203 +0000 UTC m=+919.308506999"
Oct 11 05:06:38 crc kubenswrapper[4651]: I1011 05:06:38.457025 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=18.237167007 podStartE2EDuration="26.457008991s" podCreationTimestamp="2025-10-11 05:06:12 +0000 UTC" firstStartedPulling="2025-10-11 05:06:24.255549948 +0000 UTC m=+905.151782744" lastFinishedPulling="2025-10-11 05:06:32.475391932 +0000 UTC m=+913.371624728" observedRunningTime="2025-10-11 05:06:38.453758768 +0000 UTC m=+919.349991564" watchObservedRunningTime="2025-10-11 05:06:38.457008991 +0000 UTC m=+919.353241787"
Oct 11 05:06:39 crc kubenswrapper[4651]: I1011 05:06:39.069171 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0"
Oct 11 05:06:39 crc kubenswrapper[4651]: I1011 05:06:39.069225 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0"
Oct 11 05:06:39 crc kubenswrapper[4651]: I1011 05:06:39.146772 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0"
Oct 11 05:06:39 crc kubenswrapper[4651]: I1011 05:06:39.269037 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0"
Oct 11 05:06:39 crc kubenswrapper[4651]: I1011 05:06:39.391336 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0"
Oct 11 05:06:39 crc kubenswrapper[4651]: I1011 05:06:39.433125 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0"
Oct 11 05:06:39 crc kubenswrapper[4651]: I1011 05:06:39.434004 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0"
Oct 11 05:06:39 crc kubenswrapper[4651]: I1011 05:06:39.725466 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-km85b"]
Oct 11 05:06:39 crc kubenswrapper[4651]: E1011 05:06:39.725792 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7855690d-2913-4d7b-81de-f34a5f299431" containerName="dnsmasq-dns"
Oct 11 05:06:39 crc kubenswrapper[4651]: I1011 05:06:39.725809 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="7855690d-2913-4d7b-81de-f34a5f299431" containerName="dnsmasq-dns"
Oct 11 05:06:39 crc kubenswrapper[4651]: E1011 05:06:39.725881 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7855690d-2913-4d7b-81de-f34a5f299431" containerName="init"
Oct 11 05:06:39 crc kubenswrapper[4651]: I1011 05:06:39.725887 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="7855690d-2913-4d7b-81de-f34a5f299431" containerName="init"
Oct 11 05:06:39 crc kubenswrapper[4651]: I1011 05:06:39.726031 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="7855690d-2913-4d7b-81de-f34a5f299431" containerName="dnsmasq-dns"
Oct 11 05:06:39 crc kubenswrapper[4651]: I1011 05:06:39.726923 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-km85b"
Oct 11 05:06:39 crc kubenswrapper[4651]: I1011 05:06:39.729002 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb"
Oct 11 05:06:39 crc kubenswrapper[4651]: I1011 05:06:39.739013 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-km85b"]
Oct 11 05:06:39 crc kubenswrapper[4651]: I1011 05:06:39.907865 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0f4dae4c-9a14-431b-a226-dccff4fefd1d-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc7876d45-km85b\" (UID: \"0f4dae4c-9a14-431b-a226-dccff4fefd1d\") " pod="openstack/dnsmasq-dns-6bc7876d45-km85b"
Oct 11 05:06:39 crc kubenswrapper[4651]: I1011 05:06:39.907935 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wd7jd\" (UniqueName: \"kubernetes.io/projected/0f4dae4c-9a14-431b-a226-dccff4fefd1d-kube-api-access-wd7jd\") pod \"dnsmasq-dns-6bc7876d45-km85b\" (UID: \"0f4dae4c-9a14-431b-a226-dccff4fefd1d\") " pod="openstack/dnsmasq-dns-6bc7876d45-km85b"
Oct 11 05:06:39 crc kubenswrapper[4651]: I1011 05:06:39.907973 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0f4dae4c-9a14-431b-a226-dccff4fefd1d-dns-svc\") pod \"dnsmasq-dns-6bc7876d45-km85b\" (UID: \"0f4dae4c-9a14-431b-a226-dccff4fefd1d\") " pod="openstack/dnsmasq-dns-6bc7876d45-km85b"
Oct 11 05:06:39 crc kubenswrapper[4651]: I1011 05:06:39.908021 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f4dae4c-9a14-431b-a226-dccff4fefd1d-config\") pod \"dnsmasq-dns-6bc7876d45-km85b\" (UID: \"0f4dae4c-9a14-431b-a226-dccff4fefd1d\") " pod="openstack/dnsmasq-dns-6bc7876d45-km85b"
Oct 11 05:06:39 crc kubenswrapper[4651]: I1011 05:06:39.938801 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-6c9fd"]
Oct 11 05:06:39 crc kubenswrapper[4651]: I1011 05:06:39.939983 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-6c9fd"
Oct 11 05:06:39 crc kubenswrapper[4651]: I1011 05:06:39.944217 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config"
Oct 11 05:06:39 crc kubenswrapper[4651]: I1011 05:06:39.948912 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-6c9fd"]
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.009056 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wd7jd\" (UniqueName: \"kubernetes.io/projected/0f4dae4c-9a14-431b-a226-dccff4fefd1d-kube-api-access-wd7jd\") pod \"dnsmasq-dns-6bc7876d45-km85b\" (UID: \"0f4dae4c-9a14-431b-a226-dccff4fefd1d\") " pod="openstack/dnsmasq-dns-6bc7876d45-km85b"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.009114 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0f4dae4c-9a14-431b-a226-dccff4fefd1d-dns-svc\") pod \"dnsmasq-dns-6bc7876d45-km85b\" (UID: \"0f4dae4c-9a14-431b-a226-dccff4fefd1d\") " pod="openstack/dnsmasq-dns-6bc7876d45-km85b"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.009183 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f4dae4c-9a14-431b-a226-dccff4fefd1d-config\") pod \"dnsmasq-dns-6bc7876d45-km85b\" (UID: \"0f4dae4c-9a14-431b-a226-dccff4fefd1d\") " pod="openstack/dnsmasq-dns-6bc7876d45-km85b"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.009286 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0f4dae4c-9a14-431b-a226-dccff4fefd1d-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc7876d45-km85b\" (UID: \"0f4dae4c-9a14-431b-a226-dccff4fefd1d\") " pod="openstack/dnsmasq-dns-6bc7876d45-km85b"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.010317 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f4dae4c-9a14-431b-a226-dccff4fefd1d-config\") pod \"dnsmasq-dns-6bc7876d45-km85b\" (UID: \"0f4dae4c-9a14-431b-a226-dccff4fefd1d\") " pod="openstack/dnsmasq-dns-6bc7876d45-km85b"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.010856 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0f4dae4c-9a14-431b-a226-dccff4fefd1d-dns-svc\") pod \"dnsmasq-dns-6bc7876d45-km85b\" (UID: \"0f4dae4c-9a14-431b-a226-dccff4fefd1d\") " pod="openstack/dnsmasq-dns-6bc7876d45-km85b"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.010936 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0f4dae4c-9a14-431b-a226-dccff4fefd1d-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc7876d45-km85b\" (UID: \"0f4dae4c-9a14-431b-a226-dccff4fefd1d\") " pod="openstack/dnsmasq-dns-6bc7876d45-km85b"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.031337 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-km85b"]
Oct 11 05:06:40 crc kubenswrapper[4651]: E1011 05:06:40.031872 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-wd7jd], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-6bc7876d45-km85b" podUID="0f4dae4c-9a14-431b-a226-dccff4fefd1d"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.039312 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wd7jd\" (UniqueName: \"kubernetes.io/projected/0f4dae4c-9a14-431b-a226-dccff4fefd1d-kube-api-access-wd7jd\") pod \"dnsmasq-dns-6bc7876d45-km85b\" (UID: \"0f4dae4c-9a14-431b-a226-dccff4fefd1d\") " pod="openstack/dnsmasq-dns-6bc7876d45-km85b"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.065340 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8554648995-jjb2l"]
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.066757 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-jjb2l"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.071226 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.088440 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-jjb2l"]
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.100034 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"]
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.105549 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.110773 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e416ba6-0c00-41f4-857d-3c53c9179e6b-config\") pod \"ovn-controller-metrics-6c9fd\" (UID: \"6e416ba6-0c00-41f4-857d-3c53c9179e6b\") " pod="openstack/ovn-controller-metrics-6c9fd"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.110867 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e416ba6-0c00-41f4-857d-3c53c9179e6b-combined-ca-bundle\") pod \"ovn-controller-metrics-6c9fd\" (UID: \"6e416ba6-0c00-41f4-857d-3c53c9179e6b\") " pod="openstack/ovn-controller-metrics-6c9fd"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.110933 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e416ba6-0c00-41f4-857d-3c53c9179e6b-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-6c9fd\" (UID: \"6e416ba6-0c00-41f4-857d-3c53c9179e6b\") " pod="openstack/ovn-controller-metrics-6c9fd"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.110968 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/6e416ba6-0c00-41f4-857d-3c53c9179e6b-ovs-rundir\") pod \"ovn-controller-metrics-6c9fd\" (UID: \"6e416ba6-0c00-41f4-857d-3c53c9179e6b\") " pod="openstack/ovn-controller-metrics-6c9fd"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.110999 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjnds\" (UniqueName: \"kubernetes.io/projected/6e416ba6-0c00-41f4-857d-3c53c9179e6b-kube-api-access-bjnds\") pod \"ovn-controller-metrics-6c9fd\" (UID: \"6e416ba6-0c00-41f4-857d-3c53c9179e6b\") " pod="openstack/ovn-controller-metrics-6c9fd"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.111029 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/6e416ba6-0c00-41f4-857d-3c53c9179e6b-ovn-rundir\") pod \"ovn-controller-metrics-6c9fd\" (UID: \"6e416ba6-0c00-41f4-857d-3c53c9179e6b\") " pod="openstack/ovn-controller-metrics-6c9fd"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.117362 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-25zcj"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.117892 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.118010 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.118133 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.127876 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.212116 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e416ba6-0c00-41f4-857d-3c53c9179e6b-config\") pod \"ovn-controller-metrics-6c9fd\" (UID: \"6e416ba6-0c00-41f4-857d-3c53c9179e6b\") " pod="openstack/ovn-controller-metrics-6c9fd"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.212170 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e416ba6-0c00-41f4-857d-3c53c9179e6b-combined-ca-bundle\") pod \"ovn-controller-metrics-6c9fd\" (UID: \"6e416ba6-0c00-41f4-857d-3c53c9179e6b\") " pod="openstack/ovn-controller-metrics-6c9fd"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.212199 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7f3b01c-ab92-45f7-9e89-a76a93a8db6a-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"a7f3b01c-ab92-45f7-9e89-a76a93a8db6a\") " pod="openstack/ovn-northd-0"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.212242 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2zx9\" (UniqueName: \"kubernetes.io/projected/a7f3b01c-ab92-45f7-9e89-a76a93a8db6a-kube-api-access-m2zx9\") pod \"ovn-northd-0\" (UID: \"a7f3b01c-ab92-45f7-9e89-a76a93a8db6a\") " pod="openstack/ovn-northd-0"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.212271 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7f3b01c-ab92-45f7-9e89-a76a93a8db6a-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"a7f3b01c-ab92-45f7-9e89-a76a93a8db6a\") " pod="openstack/ovn-northd-0"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.212369 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e416ba6-0c00-41f4-857d-3c53c9179e6b-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-6c9fd\" (UID: \"6e416ba6-0c00-41f4-857d-3c53c9179e6b\") " pod="openstack/ovn-controller-metrics-6c9fd"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.212403 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a7f3b01c-ab92-45f7-9e89-a76a93a8db6a-scripts\") pod \"ovn-northd-0\" (UID: \"a7f3b01c-ab92-45f7-9e89-a76a93a8db6a\") " pod="openstack/ovn-northd-0"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.212445 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/6e416ba6-0c00-41f4-857d-3c53c9179e6b-ovs-rundir\") pod \"ovn-controller-metrics-6c9fd\" (UID: \"6e416ba6-0c00-41f4-857d-3c53c9179e6b\") " pod="openstack/ovn-controller-metrics-6c9fd"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.212679 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/6e416ba6-0c00-41f4-857d-3c53c9179e6b-ovs-rundir\") pod \"ovn-controller-metrics-6c9fd\" (UID: \"6e416ba6-0c00-41f4-857d-3c53c9179e6b\") " pod="openstack/ovn-controller-metrics-6c9fd"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.212468 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjnds\" (UniqueName: \"kubernetes.io/projected/6e416ba6-0c00-41f4-857d-3c53c9179e6b-kube-api-access-bjnds\") pod \"ovn-controller-metrics-6c9fd\" (UID: \"6e416ba6-0c00-41f4-857d-3c53c9179e6b\") " pod="openstack/ovn-controller-metrics-6c9fd"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.212749 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/6e416ba6-0c00-41f4-857d-3c53c9179e6b-ovn-rundir\") pod \"ovn-controller-metrics-6c9fd\" (UID: \"6e416ba6-0c00-41f4-857d-3c53c9179e6b\") " pod="openstack/ovn-controller-metrics-6c9fd"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.212768 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/adb1281d-7bf8-4afa-ba64-989c39f857b2-config\") pod \"dnsmasq-dns-8554648995-jjb2l\" (UID: \"adb1281d-7bf8-4afa-ba64-989c39f857b2\") " pod="openstack/dnsmasq-dns-8554648995-jjb2l"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.212786 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/adb1281d-7bf8-4afa-ba64-989c39f857b2-dns-svc\") pod \"dnsmasq-dns-8554648995-jjb2l\" (UID: \"adb1281d-7bf8-4afa-ba64-989c39f857b2\") " pod="openstack/dnsmasq-dns-8554648995-jjb2l"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.212853 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/6e416ba6-0c00-41f4-857d-3c53c9179e6b-ovn-rundir\") pod \"ovn-controller-metrics-6c9fd\" (UID: \"6e416ba6-0c00-41f4-857d-3c53c9179e6b\") " pod="openstack/ovn-controller-metrics-6c9fd"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.212872 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfd5q\" (UniqueName: \"kubernetes.io/projected/adb1281d-7bf8-4afa-ba64-989c39f857b2-kube-api-access-cfd5q\") pod \"dnsmasq-dns-8554648995-jjb2l\" (UID: \"adb1281d-7bf8-4afa-ba64-989c39f857b2\") " pod="openstack/dnsmasq-dns-8554648995-jjb2l"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.212898 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a7f3b01c-ab92-45f7-9e89-a76a93a8db6a-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"a7f3b01c-ab92-45f7-9e89-a76a93a8db6a\") " pod="openstack/ovn-northd-0"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.212938 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/adb1281d-7bf8-4afa-ba64-989c39f857b2-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-jjb2l\" (UID: \"adb1281d-7bf8-4afa-ba64-989c39f857b2\") " pod="openstack/dnsmasq-dns-8554648995-jjb2l"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.212964 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/adb1281d-7bf8-4afa-ba64-989c39f857b2-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-jjb2l\" (UID: \"adb1281d-7bf8-4afa-ba64-989c39f857b2\") " pod="openstack/dnsmasq-dns-8554648995-jjb2l"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.212896 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e416ba6-0c00-41f4-857d-3c53c9179e6b-config\") pod \"ovn-controller-metrics-6c9fd\" (UID: \"6e416ba6-0c00-41f4-857d-3c53c9179e6b\") " pod="openstack/ovn-controller-metrics-6c9fd"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.213013 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7f3b01c-ab92-45f7-9e89-a76a93a8db6a-config\") pod \"ovn-northd-0\" (UID: \"a7f3b01c-ab92-45f7-9e89-a76a93a8db6a\") " pod="openstack/ovn-northd-0"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.213068 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f3b01c-ab92-45f7-9e89-a76a93a8db6a-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"a7f3b01c-ab92-45f7-9e89-a76a93a8db6a\") " pod="openstack/ovn-northd-0"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.218258 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e416ba6-0c00-41f4-857d-3c53c9179e6b-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-6c9fd\" (UID: \"6e416ba6-0c00-41f4-857d-3c53c9179e6b\") " pod="openstack/ovn-controller-metrics-6c9fd"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.219180 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e416ba6-0c00-41f4-857d-3c53c9179e6b-combined-ca-bundle\") pod \"ovn-controller-metrics-6c9fd\" (UID: \"6e416ba6-0c00-41f4-857d-3c53c9179e6b\") " pod="openstack/ovn-controller-metrics-6c9fd"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.247710 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjnds\" (UniqueName: \"kubernetes.io/projected/6e416ba6-0c00-41f4-857d-3c53c9179e6b-kube-api-access-bjnds\") pod \"ovn-controller-metrics-6c9fd\" (UID: \"6e416ba6-0c00-41f4-857d-3c53c9179e6b\") " pod="openstack/ovn-controller-metrics-6c9fd"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.274535 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-6c9fd"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.317791 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f3b01c-ab92-45f7-9e89-a76a93a8db6a-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"a7f3b01c-ab92-45f7-9e89-a76a93a8db6a\") " pod="openstack/ovn-northd-0"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.317902 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7f3b01c-ab92-45f7-9e89-a76a93a8db6a-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"a7f3b01c-ab92-45f7-9e89-a76a93a8db6a\") " pod="openstack/ovn-northd-0"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.317945 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2zx9\" (UniqueName: \"kubernetes.io/projected/a7f3b01c-ab92-45f7-9e89-a76a93a8db6a-kube-api-access-m2zx9\") pod \"ovn-northd-0\" (UID: \"a7f3b01c-ab92-45f7-9e89-a76a93a8db6a\") " pod="openstack/ovn-northd-0"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.317967 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7f3b01c-ab92-45f7-9e89-a76a93a8db6a-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"a7f3b01c-ab92-45f7-9e89-a76a93a8db6a\") " pod="openstack/ovn-northd-0"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.318000 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a7f3b01c-ab92-45f7-9e89-a76a93a8db6a-scripts\") pod \"ovn-northd-0\" (UID: \"a7f3b01c-ab92-45f7-9e89-a76a93a8db6a\") " pod="openstack/ovn-northd-0"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.318054 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/adb1281d-7bf8-4afa-ba64-989c39f857b2-config\") pod \"dnsmasq-dns-8554648995-jjb2l\" (UID: \"adb1281d-7bf8-4afa-ba64-989c39f857b2\") " pod="openstack/dnsmasq-dns-8554648995-jjb2l"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.318095 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/adb1281d-7bf8-4afa-ba64-989c39f857b2-dns-svc\") pod \"dnsmasq-dns-8554648995-jjb2l\" (UID: \"adb1281d-7bf8-4afa-ba64-989c39f857b2\") " pod="openstack/dnsmasq-dns-8554648995-jjb2l"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.318118 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfd5q\" (UniqueName: \"kubernetes.io/projected/adb1281d-7bf8-4afa-ba64-989c39f857b2-kube-api-access-cfd5q\") pod \"dnsmasq-dns-8554648995-jjb2l\" (UID: \"adb1281d-7bf8-4afa-ba64-989c39f857b2\") " pod="openstack/dnsmasq-dns-8554648995-jjb2l"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.318145 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a7f3b01c-ab92-45f7-9e89-a76a93a8db6a-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"a7f3b01c-ab92-45f7-9e89-a76a93a8db6a\") " pod="openstack/ovn-northd-0"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.318169 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/adb1281d-7bf8-4afa-ba64-989c39f857b2-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-jjb2l\" (UID: \"adb1281d-7bf8-4afa-ba64-989c39f857b2\") " pod="openstack/dnsmasq-dns-8554648995-jjb2l"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.318190 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/adb1281d-7bf8-4afa-ba64-989c39f857b2-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-jjb2l\" (UID: \"adb1281d-7bf8-4afa-ba64-989c39f857b2\") " pod="openstack/dnsmasq-dns-8554648995-jjb2l"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.318219 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7f3b01c-ab92-45f7-9e89-a76a93a8db6a-config\") pod \"ovn-northd-0\" (UID: \"a7f3b01c-ab92-45f7-9e89-a76a93a8db6a\") " pod="openstack/ovn-northd-0"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.319496 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7f3b01c-ab92-45f7-9e89-a76a93a8db6a-config\") pod \"ovn-northd-0\" (UID: \"a7f3b01c-ab92-45f7-9e89-a76a93a8db6a\") " pod="openstack/ovn-northd-0"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.322985 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/adb1281d-7bf8-4afa-ba64-989c39f857b2-config\") pod \"dnsmasq-dns-8554648995-jjb2l\" (UID: \"adb1281d-7bf8-4afa-ba64-989c39f857b2\") " pod="openstack/dnsmasq-dns-8554648995-jjb2l"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.324501 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a7f3b01c-ab92-45f7-9e89-a76a93a8db6a-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"a7f3b01c-ab92-45f7-9e89-a76a93a8db6a\") " pod="openstack/ovn-northd-0"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.325249 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/adb1281d-7bf8-4afa-ba64-989c39f857b2-dns-svc\") pod \"dnsmasq-dns-8554648995-jjb2l\" (UID: \"adb1281d-7bf8-4afa-ba64-989c39f857b2\") " pod="openstack/dnsmasq-dns-8554648995-jjb2l"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.326938 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/adb1281d-7bf8-4afa-ba64-989c39f857b2-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-jjb2l\" (UID: \"adb1281d-7bf8-4afa-ba64-989c39f857b2\") " pod="openstack/dnsmasq-dns-8554648995-jjb2l"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.327843 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a7f3b01c-ab92-45f7-9e89-a76a93a8db6a-scripts\") pod \"ovn-northd-0\" (UID: \"a7f3b01c-ab92-45f7-9e89-a76a93a8db6a\") " pod="openstack/ovn-northd-0"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.330504 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/adb1281d-7bf8-4afa-ba64-989c39f857b2-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-jjb2l\" (UID: \"adb1281d-7bf8-4afa-ba64-989c39f857b2\") " pod="openstack/dnsmasq-dns-8554648995-jjb2l"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.343902 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7f3b01c-ab92-45f7-9e89-a76a93a8db6a-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"a7f3b01c-ab92-45f7-9e89-a76a93a8db6a\") " pod="openstack/ovn-northd-0"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.350419 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7f3b01c-ab92-45f7-9e89-a76a93a8db6a-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"a7f3b01c-ab92-45f7-9e89-a76a93a8db6a\") " pod="openstack/ovn-northd-0"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.351541 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f3b01c-ab92-45f7-9e89-a76a93a8db6a-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"a7f3b01c-ab92-45f7-9e89-a76a93a8db6a\") " pod="openstack/ovn-northd-0"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.355628 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfd5q\" (UniqueName: \"kubernetes.io/projected/adb1281d-7bf8-4afa-ba64-989c39f857b2-kube-api-access-cfd5q\") pod \"dnsmasq-dns-8554648995-jjb2l\" (UID: \"adb1281d-7bf8-4afa-ba64-989c39f857b2\") " pod="openstack/dnsmasq-dns-8554648995-jjb2l"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.384836 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2zx9\" (UniqueName: \"kubernetes.io/projected/a7f3b01c-ab92-45f7-9e89-a76a93a8db6a-kube-api-access-m2zx9\") pod \"ovn-northd-0\" (UID: \"a7f3b01c-ab92-45f7-9e89-a76a93a8db6a\") " pod="openstack/ovn-northd-0"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.412350 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-jjb2l"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.437695 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-km85b"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.438941 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.454892 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-km85b"
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.624951 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wd7jd\" (UniqueName: \"kubernetes.io/projected/0f4dae4c-9a14-431b-a226-dccff4fefd1d-kube-api-access-wd7jd\") pod \"0f4dae4c-9a14-431b-a226-dccff4fefd1d\" (UID: \"0f4dae4c-9a14-431b-a226-dccff4fefd1d\") "
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.625046 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0f4dae4c-9a14-431b-a226-dccff4fefd1d-dns-svc\") pod \"0f4dae4c-9a14-431b-a226-dccff4fefd1d\" (UID: \"0f4dae4c-9a14-431b-a226-dccff4fefd1d\") "
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.625126 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0f4dae4c-9a14-431b-a226-dccff4fefd1d-ovsdbserver-sb\") pod \"0f4dae4c-9a14-431b-a226-dccff4fefd1d\" (UID: \"0f4dae4c-9a14-431b-a226-dccff4fefd1d\") "
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.625166 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f4dae4c-9a14-431b-a226-dccff4fefd1d-config\") pod \"0f4dae4c-9a14-431b-a226-dccff4fefd1d\" (UID: \"0f4dae4c-9a14-431b-a226-dccff4fefd1d\") "
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.625869 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f4dae4c-9a14-431b-a226-dccff4fefd1d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0f4dae4c-9a14-431b-a226-dccff4fefd1d" (UID: "0f4dae4c-9a14-431b-a226-dccff4fefd1d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.626267 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f4dae4c-9a14-431b-a226-dccff4fefd1d-config" (OuterVolumeSpecName: "config") pod "0f4dae4c-9a14-431b-a226-dccff4fefd1d" (UID: "0f4dae4c-9a14-431b-a226-dccff4fefd1d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.630027 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f4dae4c-9a14-431b-a226-dccff4fefd1d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0f4dae4c-9a14-431b-a226-dccff4fefd1d" (UID: "0f4dae4c-9a14-431b-a226-dccff4fefd1d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.632503 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f4dae4c-9a14-431b-a226-dccff4fefd1d-kube-api-access-wd7jd" (OuterVolumeSpecName: "kube-api-access-wd7jd") pod "0f4dae4c-9a14-431b-a226-dccff4fefd1d" (UID: "0f4dae4c-9a14-431b-a226-dccff4fefd1d"). InnerVolumeSpecName "kube-api-access-wd7jd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.727303 4651 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0f4dae4c-9a14-431b-a226-dccff4fefd1d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.727568 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f4dae4c-9a14-431b-a226-dccff4fefd1d-config\") on node \"crc\" DevicePath \"\""
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.727579 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wd7jd\" (UniqueName: \"kubernetes.io/projected/0f4dae4c-9a14-431b-a226-dccff4fefd1d-kube-api-access-wd7jd\") on node \"crc\" DevicePath \"\""
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.727588 4651 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0f4dae4c-9a14-431b-a226-dccff4fefd1d-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.863571 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-6c9fd"]
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.989498 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Oct 11 05:06:40 crc kubenswrapper[4651]: I1011 05:06:40.995525 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-jjb2l"]
Oct 11 05:06:41 crc kubenswrapper[4651]: I1011 05:06:41.447547 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"a7f3b01c-ab92-45f7-9e89-a76a93a8db6a","Type":"ContainerStarted","Data":"0f36c699b1f6279f10ca976ab0cabeb456f1f08131147445a85c6ea7ab02135a"}
Oct 11 05:06:41 crc kubenswrapper[4651]: I1011 05:06:41.449941 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-6c9fd" event={"ID":"6e416ba6-0c00-41f4-857d-3c53c9179e6b","Type":"ContainerStarted","Data":"7ae611b74b04f30ede3334cc27a1c118f0f9ac19e17cde993dfb0180083142af"}
Oct 11 05:06:41 crc kubenswrapper[4651]: I1011 05:06:41.449967 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-6c9fd" event={"ID":"6e416ba6-0c00-41f4-857d-3c53c9179e6b","Type":"ContainerStarted","Data":"a2ac046e41a25de30f54018f7fa09224254f91bfeeb4920a6a3b46765a005427"}
Oct 11 05:06:41 crc kubenswrapper[4651]: I1011 05:06:41.451852 4651 generic.go:334] "Generic (PLEG): container finished" podID="adb1281d-7bf8-4afa-ba64-989c39f857b2" containerID="ad3cae171c58e84d579daa0a973b928c8399a9bddebdf4e609e13d2096f6f430" exitCode=0
Oct 11 05:06:41 crc kubenswrapper[4651]: I1011 05:06:41.451908 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-km85b"
Oct 11 05:06:41 crc kubenswrapper[4651]: I1011 05:06:41.451891 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-jjb2l" event={"ID":"adb1281d-7bf8-4afa-ba64-989c39f857b2","Type":"ContainerDied","Data":"ad3cae171c58e84d579daa0a973b928c8399a9bddebdf4e609e13d2096f6f430"}
Oct 11 05:06:41 crc kubenswrapper[4651]: I1011 05:06:41.452088 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-jjb2l" event={"ID":"adb1281d-7bf8-4afa-ba64-989c39f857b2","Type":"ContainerStarted","Data":"549a800d871fc26039906abc0a81735556f8eb8f95c21deb1ba7309bea5dee04"}
Oct 11 05:06:41 crc kubenswrapper[4651]: I1011 05:06:41.475341 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-6c9fd" podStartSLOduration=2.475320715 podStartE2EDuration="2.475320715s" podCreationTimestamp="2025-10-11 05:06:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:06:41.471836976 +0000 UTC m=+922.368069812" watchObservedRunningTime="2025-10-11 05:06:41.475320715 +0000 UTC m=+922.371553511"
Oct 11 05:06:41 crc kubenswrapper[4651]: I1011 05:06:41.624780 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-km85b"]
Oct 11 05:06:41 crc kubenswrapper[4651]: I1011 05:06:41.633782 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-km85b"]
Oct 11 05:06:41 crc kubenswrapper[4651]: I1011 05:06:41.880668 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f4dae4c-9a14-431b-a226-dccff4fefd1d" path="/var/lib/kubelet/pods/0f4dae4c-9a14-431b-a226-dccff4fefd1d/volumes"
Oct 11 05:06:42 crc kubenswrapper[4651]: I1011 05:06:42.461442 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-jjb2l" event={"ID":"adb1281d-7bf8-4afa-ba64-989c39f857b2","Type":"ContainerStarted","Data":"e3662c435a44b690a621435e09bcb9a9f8c15449c7b7186c3e64396b403f29ac"}
Oct 11 05:06:42 crc kubenswrapper[4651]: I1011 05:06:42.740633 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0"
Oct 11 05:06:42 crc kubenswrapper[4651]: I1011 05:06:42.740667 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0"
Oct 11 05:06:42 crc kubenswrapper[4651]: I1011 05:06:42.819286 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0"
Oct 11 05:06:42 crc kubenswrapper[4651]: I1011 05:06:42.843058 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8554648995-jjb2l" podStartSLOduration=2.843030718 podStartE2EDuration="2.843030718s" podCreationTimestamp="2025-10-11 05:06:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:06:42.483600421 +0000 UTC m=+923.379833217" watchObservedRunningTime="2025-10-11 05:06:42.843030718 +0000 UTC m=+923.739263544"
Oct 11 05:06:43 crc kubenswrapper[4651]: I1011 05:06:43.469310 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"a7f3b01c-ab92-45f7-9e89-a76a93a8db6a","Type":"ContainerStarted","Data":"24135f7e6fc046cc2b1c7572fb52a5af316f23fbfcc167ac861ba70551df6a36"}
Oct 11 05:06:43 crc kubenswrapper[4651]: I1011 05:06:43.469356 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"a7f3b01c-ab92-45f7-9e89-a76a93a8db6a","Type":"ContainerStarted","Data":"58a588c498bb3cab228e87e614fadd547ff7d2e301092eff92a938d0b0928cfb"}
Oct 11 05:06:43 crc kubenswrapper[4651]: I1011 05:06:43.469889 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8554648995-jjb2l"
Oct 11 05:06:43 crc kubenswrapper[4651]: I1011 05:06:43.469945 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0"
Oct 11 05:06:43 crc kubenswrapper[4651]: I1011 05:06:43.520278 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0"
Oct 11 05:06:43 crc kubenswrapper[4651]: I1011 05:06:43.541693 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.184372419 podStartE2EDuration="3.541675286s" podCreationTimestamp="2025-10-11 05:06:40 +0000 UTC" firstStartedPulling="2025-10-11 05:06:40.994571232 +0000 UTC m=+921.890804028" lastFinishedPulling="2025-10-11 05:06:42.351874099 +0000 UTC m=+923.248106895" observedRunningTime="2025-10-11 05:06:43.492064703 +0000 UTC m=+924.388297499" watchObservedRunningTime="2025-10-11 05:06:43.541675286 +0000 UTC m=+924.437908102"
Oct 11 05:06:43 crc kubenswrapper[4651]: I1011 05:06:43.960052 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-5gv5n"]
Oct 11 05:06:43 crc kubenswrapper[4651]: I1011 05:06:43.962617 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-5gv5n"
Oct 11 05:06:43 crc kubenswrapper[4651]: I1011 05:06:43.966711 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-5gv5n"]
Oct 11 05:06:44 crc kubenswrapper[4651]: I1011 05:06:44.020965 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0"
Oct 11 05:06:44 crc kubenswrapper[4651]: I1011 05:06:44.025970 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0"
Oct 11 05:06:44 crc kubenswrapper[4651]: I1011 05:06:44.071939 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0"
Oct 11 05:06:44 crc kubenswrapper[4651]: I1011 05:06:44.110752 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twqht\" (UniqueName: \"kubernetes.io/projected/b502fe10-f784-49d8-8b56-b3384c15f4f7-kube-api-access-twqht\") pod \"keystone-db-create-5gv5n\" (UID: \"b502fe10-f784-49d8-8b56-b3384c15f4f7\") " pod="openstack/keystone-db-create-5gv5n"
Oct 11 05:06:44 crc kubenswrapper[4651]: I1011 05:06:44.196745 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-lff65"]
Oct 11 05:06:44 crc kubenswrapper[4651]: I1011 05:06:44.197788 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-lff65"
Oct 11 05:06:44 crc kubenswrapper[4651]: I1011 05:06:44.204105 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-lff65"]
Oct 11 05:06:44 crc kubenswrapper[4651]: I1011 05:06:44.218001 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twqht\" (UniqueName: \"kubernetes.io/projected/b502fe10-f784-49d8-8b56-b3384c15f4f7-kube-api-access-twqht\") pod \"keystone-db-create-5gv5n\" (UID: \"b502fe10-f784-49d8-8b56-b3384c15f4f7\") " pod="openstack/keystone-db-create-5gv5n"
Oct 11 05:06:44 crc kubenswrapper[4651]: I1011 05:06:44.236641 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twqht\" (UniqueName: \"kubernetes.io/projected/b502fe10-f784-49d8-8b56-b3384c15f4f7-kube-api-access-twqht\") pod \"keystone-db-create-5gv5n\" (UID: \"b502fe10-f784-49d8-8b56-b3384c15f4f7\") " pod="openstack/keystone-db-create-5gv5n"
Oct 11 05:06:44 crc kubenswrapper[4651]: I1011 05:06:44.286727 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-5gv5n"
Oct 11 05:06:44 crc kubenswrapper[4651]: I1011 05:06:44.319469 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pplbc\" (UniqueName: \"kubernetes.io/projected/7f0c06fb-d21f-42e0-ad0d-a54ba7cea292-kube-api-access-pplbc\") pod \"placement-db-create-lff65\" (UID: \"7f0c06fb-d21f-42e0-ad0d-a54ba7cea292\") " pod="openstack/placement-db-create-lff65"
Oct 11 05:06:44 crc kubenswrapper[4651]: I1011 05:06:44.422545 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pplbc\" (UniqueName: \"kubernetes.io/projected/7f0c06fb-d21f-42e0-ad0d-a54ba7cea292-kube-api-access-pplbc\") pod \"placement-db-create-lff65\" (UID: \"7f0c06fb-d21f-42e0-ad0d-a54ba7cea292\") " pod="openstack/placement-db-create-lff65"
Oct 11 05:06:44 crc kubenswrapper[4651]: I1011 05:06:44.451994 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pplbc\" (UniqueName: \"kubernetes.io/projected/7f0c06fb-d21f-42e0-ad0d-a54ba7cea292-kube-api-access-pplbc\") pod \"placement-db-create-lff65\" (UID: \"7f0c06fb-d21f-42e0-ad0d-a54ba7cea292\") " pod="openstack/placement-db-create-lff65"
Oct 11 05:06:44 crc kubenswrapper[4651]: I1011 05:06:44.521198 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-lff65"
Oct 11 05:06:44 crc kubenswrapper[4651]: I1011 05:06:44.536495 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0"
Oct 11 05:06:44 crc kubenswrapper[4651]: I1011 05:06:44.726181 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-5gv5n"]
Oct 11 05:06:45 crc kubenswrapper[4651]: I1011 05:06:45.003473 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-lff65"]
Oct 11 05:06:45 crc kubenswrapper[4651]: I1011 05:06:45.494324 4651 generic.go:334] "Generic (PLEG): container finished" podID="7f0c06fb-d21f-42e0-ad0d-a54ba7cea292" containerID="f8b6b2f56e80c4bfd3a9370cef23e7b470d388e9facffec9baccb9273cca0d33" exitCode=0
Oct 11 05:06:45 crc kubenswrapper[4651]: I1011 05:06:45.494386 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-lff65" event={"ID":"7f0c06fb-d21f-42e0-ad0d-a54ba7cea292","Type":"ContainerDied","Data":"f8b6b2f56e80c4bfd3a9370cef23e7b470d388e9facffec9baccb9273cca0d33"}
Oct 11 05:06:45 crc kubenswrapper[4651]: I1011 05:06:45.494415 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-lff65" event={"ID":"7f0c06fb-d21f-42e0-ad0d-a54ba7cea292","Type":"ContainerStarted","Data":"158208e1eeb8bc9528f4b9f6931864a42b69df54b9fa27484b2cc3fb4f5a6cdd"}
Oct 11 05:06:45 crc kubenswrapper[4651]: I1011 05:06:45.496373 4651 generic.go:334] "Generic (PLEG): container finished" podID="b502fe10-f784-49d8-8b56-b3384c15f4f7" containerID="b532309e0ff6a4f98e0e731ccf5914ba1cbaf5a40f9ab1e9b374e0c9f709f125" exitCode=0
Oct 11 05:06:45 crc kubenswrapper[4651]: I1011 05:06:45.496418 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-5gv5n" event={"ID":"b502fe10-f784-49d8-8b56-b3384c15f4f7","Type":"ContainerDied","Data":"b532309e0ff6a4f98e0e731ccf5914ba1cbaf5a40f9ab1e9b374e0c9f709f125"}
Oct 11 05:06:45 crc kubenswrapper[4651]: I1011 05:06:45.496478 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-5gv5n" event={"ID":"b502fe10-f784-49d8-8b56-b3384c15f4f7","Type":"ContainerStarted","Data":"075e5c311ffff39177e3978a28a737f69bdc683d5fda8708dd3e39484d76b466"}
Oct 11 05:06:46 crc kubenswrapper[4651]: I1011 05:06:46.310474 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 11 05:06:46 crc kubenswrapper[4651]: I1011 05:06:46.310791 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 11 05:06:46 crc kubenswrapper[4651]: I1011 05:06:46.360168 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0"
Oct 11 05:06:46 crc kubenswrapper[4651]: I1011 05:06:46.398619 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-jjb2l"]
Oct 11 05:06:46 crc kubenswrapper[4651]: I1011 05:06:46.402951 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8554648995-jjb2l"
Oct 11 05:06:46 crc kubenswrapper[4651]: I1011 05:06:46.407931 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8554648995-jjb2l" podUID="adb1281d-7bf8-4afa-ba64-989c39f857b2" containerName="dnsmasq-dns" containerID="cri-o://e3662c435a44b690a621435e09bcb9a9f8c15449c7b7186c3e64396b403f29ac" gracePeriod=10
Oct 11 05:06:46 crc kubenswrapper[4651]: I1011 05:06:46.453892 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-zvr5f"]
Oct 11 05:06:46 crc kubenswrapper[4651]: I1011 05:06:46.455269 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-zvr5f"
Oct 11 05:06:46 crc kubenswrapper[4651]: I1011 05:06:46.476940 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-zvr5f"]
Oct 11 05:06:46 crc kubenswrapper[4651]: I1011 05:06:46.568711 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9q9td\" (UniqueName: \"kubernetes.io/projected/64f28073-6c52-4fe5-8474-f976cd26edc0-kube-api-access-9q9td\") pod \"dnsmasq-dns-b8fbc5445-zvr5f\" (UID: \"64f28073-6c52-4fe5-8474-f976cd26edc0\") " pod="openstack/dnsmasq-dns-b8fbc5445-zvr5f"
Oct 11 05:06:46 crc kubenswrapper[4651]: I1011 05:06:46.568756 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64f28073-6c52-4fe5-8474-f976cd26edc0-config\") pod \"dnsmasq-dns-b8fbc5445-zvr5f\" (UID: \"64f28073-6c52-4fe5-8474-f976cd26edc0\") " pod="openstack/dnsmasq-dns-b8fbc5445-zvr5f"
Oct 11 05:06:46 crc kubenswrapper[4651]: I1011 05:06:46.568959 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64f28073-6c52-4fe5-8474-f976cd26edc0-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-zvr5f\" (UID: \"64f28073-6c52-4fe5-8474-f976cd26edc0\") " pod="openstack/dnsmasq-dns-b8fbc5445-zvr5f"
Oct 11 05:06:46 crc kubenswrapper[4651]: I1011 05:06:46.568995 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/64f28073-6c52-4fe5-8474-f976cd26edc0-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-zvr5f\" (UID: \"64f28073-6c52-4fe5-8474-f976cd26edc0\") " pod="openstack/dnsmasq-dns-b8fbc5445-zvr5f"
Oct 11 05:06:46 crc kubenswrapper[4651]: I1011 05:06:46.569035 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/64f28073-6c52-4fe5-8474-f976cd26edc0-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-zvr5f\" (UID: \"64f28073-6c52-4fe5-8474-f976cd26edc0\") " pod="openstack/dnsmasq-dns-b8fbc5445-zvr5f"
Oct 11 05:06:46 crc kubenswrapper[4651]: I1011 05:06:46.670113 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64f28073-6c52-4fe5-8474-f976cd26edc0-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-zvr5f\" (UID: \"64f28073-6c52-4fe5-8474-f976cd26edc0\") " pod="openstack/dnsmasq-dns-b8fbc5445-zvr5f"
Oct 11 05:06:46 crc kubenswrapper[4651]: I1011 05:06:46.670367 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/64f28073-6c52-4fe5-8474-f976cd26edc0-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-zvr5f\" (UID: \"64f28073-6c52-4fe5-8474-f976cd26edc0\") " pod="openstack/dnsmasq-dns-b8fbc5445-zvr5f"
Oct 11 05:06:46 crc kubenswrapper[4651]: I1011 05:06:46.670392 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/64f28073-6c52-4fe5-8474-f976cd26edc0-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-zvr5f\" (UID: \"64f28073-6c52-4fe5-8474-f976cd26edc0\") " pod="openstack/dnsmasq-dns-b8fbc5445-zvr5f"
Oct 11 05:06:46 crc kubenswrapper[4651]: I1011 05:06:46.670456 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9q9td\" (UniqueName: \"kubernetes.io/projected/64f28073-6c52-4fe5-8474-f976cd26edc0-kube-api-access-9q9td\") pod \"dnsmasq-dns-b8fbc5445-zvr5f\" (UID: \"64f28073-6c52-4fe5-8474-f976cd26edc0\") " pod="openstack/dnsmasq-dns-b8fbc5445-zvr5f"
Oct 11 05:06:46 crc kubenswrapper[4651]: I1011 05:06:46.670474 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64f28073-6c52-4fe5-8474-f976cd26edc0-config\") pod \"dnsmasq-dns-b8fbc5445-zvr5f\" (UID: \"64f28073-6c52-4fe5-8474-f976cd26edc0\") " pod="openstack/dnsmasq-dns-b8fbc5445-zvr5f"
Oct 11 05:06:46 crc kubenswrapper[4651]: I1011 05:06:46.671448 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64f28073-6c52-4fe5-8474-f976cd26edc0-config\") pod \"dnsmasq-dns-b8fbc5445-zvr5f\" (UID: \"64f28073-6c52-4fe5-8474-f976cd26edc0\") " pod="openstack/dnsmasq-dns-b8fbc5445-zvr5f"
Oct 11 05:06:46 crc kubenswrapper[4651]: I1011 05:06:46.671789 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/64f28073-6c52-4fe5-8474-f976cd26edc0-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-zvr5f\" (UID: \"64f28073-6c52-4fe5-8474-f976cd26edc0\") " pod="openstack/dnsmasq-dns-b8fbc5445-zvr5f"
Oct 11 05:06:46 crc kubenswrapper[4651]: I1011 05:06:46.671789 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64f28073-6c52-4fe5-8474-f976cd26edc0-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-zvr5f\" (UID: \"64f28073-6c52-4fe5-8474-f976cd26edc0\") " pod="openstack/dnsmasq-dns-b8fbc5445-zvr5f"
Oct 11 05:06:46 crc kubenswrapper[4651]: I1011 05:06:46.671929 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/64f28073-6c52-4fe5-8474-f976cd26edc0-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-zvr5f\" (UID: \"64f28073-6c52-4fe5-8474-f976cd26edc0\") " pod="openstack/dnsmasq-dns-b8fbc5445-zvr5f"
Oct 11 05:06:46 crc kubenswrapper[4651]: I1011 05:06:46.694695 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9q9td\" (UniqueName: \"kubernetes.io/projected/64f28073-6c52-4fe5-8474-f976cd26edc0-kube-api-access-9q9td\") pod \"dnsmasq-dns-b8fbc5445-zvr5f\" (UID: \"64f28073-6c52-4fe5-8474-f976cd26edc0\") " pod="openstack/dnsmasq-dns-b8fbc5445-zvr5f"
Oct 11 05:06:46 crc kubenswrapper[4651]: I1011 05:06:46.794263 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-zvr5f"
Oct 11 05:06:46 crc kubenswrapper[4651]: I1011 05:06:46.916383 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-lff65"
Oct 11 05:06:46 crc kubenswrapper[4651]: I1011 05:06:46.973416 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pplbc\" (UniqueName: \"kubernetes.io/projected/7f0c06fb-d21f-42e0-ad0d-a54ba7cea292-kube-api-access-pplbc\") pod \"7f0c06fb-d21f-42e0-ad0d-a54ba7cea292\" (UID: \"7f0c06fb-d21f-42e0-ad0d-a54ba7cea292\") "
Oct 11 05:06:46 crc kubenswrapper[4651]: I1011 05:06:46.979736 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f0c06fb-d21f-42e0-ad0d-a54ba7cea292-kube-api-access-pplbc" (OuterVolumeSpecName: "kube-api-access-pplbc") pod "7f0c06fb-d21f-42e0-ad0d-a54ba7cea292" (UID: "7f0c06fb-d21f-42e0-ad0d-a54ba7cea292"). InnerVolumeSpecName "kube-api-access-pplbc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:06:46 crc kubenswrapper[4651]: I1011 05:06:46.983127 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-jjb2l"
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.027639 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-5gv5n"
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.075956 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/adb1281d-7bf8-4afa-ba64-989c39f857b2-dns-svc\") pod \"adb1281d-7bf8-4afa-ba64-989c39f857b2\" (UID: \"adb1281d-7bf8-4afa-ba64-989c39f857b2\") "
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.076013 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfd5q\" (UniqueName: \"kubernetes.io/projected/adb1281d-7bf8-4afa-ba64-989c39f857b2-kube-api-access-cfd5q\") pod \"adb1281d-7bf8-4afa-ba64-989c39f857b2\" (UID: \"adb1281d-7bf8-4afa-ba64-989c39f857b2\") "
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.076098 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/adb1281d-7bf8-4afa-ba64-989c39f857b2-ovsdbserver-sb\") pod \"adb1281d-7bf8-4afa-ba64-989c39f857b2\" (UID: \"adb1281d-7bf8-4afa-ba64-989c39f857b2\") "
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.076159 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-twqht\" (UniqueName: \"kubernetes.io/projected/b502fe10-f784-49d8-8b56-b3384c15f4f7-kube-api-access-twqht\") pod \"b502fe10-f784-49d8-8b56-b3384c15f4f7\" (UID: \"b502fe10-f784-49d8-8b56-b3384c15f4f7\") "
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.076189 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/adb1281d-7bf8-4afa-ba64-989c39f857b2-ovsdbserver-nb\") pod \"adb1281d-7bf8-4afa-ba64-989c39f857b2\" (UID: \"adb1281d-7bf8-4afa-ba64-989c39f857b2\") "
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.076256 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/adb1281d-7bf8-4afa-ba64-989c39f857b2-config\") pod \"adb1281d-7bf8-4afa-ba64-989c39f857b2\" (UID: \"adb1281d-7bf8-4afa-ba64-989c39f857b2\") "
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.076567 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pplbc\" (UniqueName: \"kubernetes.io/projected/7f0c06fb-d21f-42e0-ad0d-a54ba7cea292-kube-api-access-pplbc\") on node \"crc\" DevicePath \"\""
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.082925 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/adb1281d-7bf8-4afa-ba64-989c39f857b2-kube-api-access-cfd5q" (OuterVolumeSpecName: "kube-api-access-cfd5q") pod "adb1281d-7bf8-4afa-ba64-989c39f857b2" (UID: "adb1281d-7bf8-4afa-ba64-989c39f857b2"). InnerVolumeSpecName "kube-api-access-cfd5q". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.087432 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b502fe10-f784-49d8-8b56-b3384c15f4f7-kube-api-access-twqht" (OuterVolumeSpecName: "kube-api-access-twqht") pod "b502fe10-f784-49d8-8b56-b3384c15f4f7" (UID: "b502fe10-f784-49d8-8b56-b3384c15f4f7"). InnerVolumeSpecName "kube-api-access-twqht". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.131290 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/adb1281d-7bf8-4afa-ba64-989c39f857b2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "adb1281d-7bf8-4afa-ba64-989c39f857b2" (UID: "adb1281d-7bf8-4afa-ba64-989c39f857b2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.131563 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/adb1281d-7bf8-4afa-ba64-989c39f857b2-config" (OuterVolumeSpecName: "config") pod "adb1281d-7bf8-4afa-ba64-989c39f857b2" (UID: "adb1281d-7bf8-4afa-ba64-989c39f857b2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.131690 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/adb1281d-7bf8-4afa-ba64-989c39f857b2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "adb1281d-7bf8-4afa-ba64-989c39f857b2" (UID: "adb1281d-7bf8-4afa-ba64-989c39f857b2"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.146515 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/adb1281d-7bf8-4afa-ba64-989c39f857b2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "adb1281d-7bf8-4afa-ba64-989c39f857b2" (UID: "adb1281d-7bf8-4afa-ba64-989c39f857b2"). InnerVolumeSpecName "ovsdbserver-nb".
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.178893 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfd5q\" (UniqueName: \"kubernetes.io/projected/adb1281d-7bf8-4afa-ba64-989c39f857b2-kube-api-access-cfd5q\") on node \"crc\" DevicePath \"\"" Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.178946 4651 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/adb1281d-7bf8-4afa-ba64-989c39f857b2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.178956 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-twqht\" (UniqueName: \"kubernetes.io/projected/b502fe10-f784-49d8-8b56-b3384c15f4f7-kube-api-access-twqht\") on node \"crc\" DevicePath \"\"" Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.178964 4651 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/adb1281d-7bf8-4afa-ba64-989c39f857b2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.178990 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/adb1281d-7bf8-4afa-ba64-989c39f857b2-config\") on node \"crc\" DevicePath \"\"" Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.178998 4651 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/adb1281d-7bf8-4afa-ba64-989c39f857b2-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 11 05:06:47 crc kubenswrapper[4651]: W1011 05:06:47.344511 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64f28073_6c52_4fe5_8474_f976cd26edc0.slice/crio-0faf20d50dc1b9a257edd1fa98a1dc72f33c4bf7fcbe1c8f909cdf6272dea9da WatchSource:0}: Error finding container 0faf20d50dc1b9a257edd1fa98a1dc72f33c4bf7fcbe1c8f909cdf6272dea9da: Status 404 returned error can't find the container with id 0faf20d50dc1b9a257edd1fa98a1dc72f33c4bf7fcbe1c8f909cdf6272dea9da Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.350761 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-zvr5f"] Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.513563 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-5gv5n" event={"ID":"b502fe10-f784-49d8-8b56-b3384c15f4f7","Type":"ContainerDied","Data":"075e5c311ffff39177e3978a28a737f69bdc683d5fda8708dd3e39484d76b466"} Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.514082 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="075e5c311ffff39177e3978a28a737f69bdc683d5fda8708dd3e39484d76b466" Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.513811 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-5gv5n" Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.515641 4651 generic.go:334] "Generic (PLEG): container finished" podID="adb1281d-7bf8-4afa-ba64-989c39f857b2" containerID="e3662c435a44b690a621435e09bcb9a9f8c15449c7b7186c3e64396b403f29ac" exitCode=0 Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.515709 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-jjb2l" event={"ID":"adb1281d-7bf8-4afa-ba64-989c39f857b2","Type":"ContainerDied","Data":"e3662c435a44b690a621435e09bcb9a9f8c15449c7b7186c3e64396b403f29ac"} Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.515740 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-jjb2l" event={"ID":"adb1281d-7bf8-4afa-ba64-989c39f857b2","Type":"ContainerDied","Data":"549a800d871fc26039906abc0a81735556f8eb8f95c21deb1ba7309bea5dee04"} Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.515759 4651 scope.go:117] "RemoveContainer" containerID="e3662c435a44b690a621435e09bcb9a9f8c15449c7b7186c3e64396b403f29ac" Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.516181 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-jjb2l" Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.517022 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-zvr5f" event={"ID":"64f28073-6c52-4fe5-8474-f976cd26edc0","Type":"ContainerStarted","Data":"0faf20d50dc1b9a257edd1fa98a1dc72f33c4bf7fcbe1c8f909cdf6272dea9da"} Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.518967 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-lff65" event={"ID":"7f0c06fb-d21f-42e0-ad0d-a54ba7cea292","Type":"ContainerDied","Data":"158208e1eeb8bc9528f4b9f6931864a42b69df54b9fa27484b2cc3fb4f5a6cdd"} Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.519003 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="158208e1eeb8bc9528f4b9f6931864a42b69df54b9fa27484b2cc3fb4f5a6cdd" Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.519019 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-lff65" Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.596330 4651 scope.go:117] "RemoveContainer" containerID="ad3cae171c58e84d579daa0a973b928c8399a9bddebdf4e609e13d2096f6f430" Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.611020 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-jjb2l"] Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.615183 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8554648995-jjb2l"] Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.628908 4651 scope.go:117] "RemoveContainer" containerID="e3662c435a44b690a621435e09bcb9a9f8c15449c7b7186c3e64396b403f29ac" Oct 11 05:06:47 crc kubenswrapper[4651]: E1011 05:06:47.629339 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e3662c435a44b690a621435e09bcb9a9f8c15449c7b7186c3e64396b403f29ac\": container with ID starting with e3662c435a44b690a621435e09bcb9a9f8c15449c7b7186c3e64396b403f29ac not found: ID does not exist" containerID="e3662c435a44b690a621435e09bcb9a9f8c15449c7b7186c3e64396b403f29ac" Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.629360 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e3662c435a44b690a621435e09bcb9a9f8c15449c7b7186c3e64396b403f29ac"} err="failed to get container status \"e3662c435a44b690a621435e09bcb9a9f8c15449c7b7186c3e64396b403f29ac\": rpc error: code = NotFound desc = could not find container \"e3662c435a44b690a621435e09bcb9a9f8c15449c7b7186c3e64396b403f29ac\": container with ID starting with e3662c435a44b690a621435e09bcb9a9f8c15449c7b7186c3e64396b403f29ac not found: ID does not exist" Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.629378 4651 scope.go:117] "RemoveContainer" containerID="ad3cae171c58e84d579daa0a973b928c8399a9bddebdf4e609e13d2096f6f430" Oct 11 05:06:47 crc kubenswrapper[4651]: E1011 05:06:47.629742 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad3cae171c58e84d579daa0a973b928c8399a9bddebdf4e609e13d2096f6f430\": container with ID starting with ad3cae171c58e84d579daa0a973b928c8399a9bddebdf4e609e13d2096f6f430 not found: ID does not exist" containerID="ad3cae171c58e84d579daa0a973b928c8399a9bddebdf4e609e13d2096f6f430" Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.629763 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad3cae171c58e84d579daa0a973b928c8399a9bddebdf4e609e13d2096f6f430"} err="failed to get container status \"ad3cae171c58e84d579daa0a973b928c8399a9bddebdf4e609e13d2096f6f430\": rpc error: code = NotFound desc = could not find container \"ad3cae171c58e84d579daa0a973b928c8399a9bddebdf4e609e13d2096f6f430\": container with ID starting with ad3cae171c58e84d579daa0a973b928c8399a9bddebdf4e609e13d2096f6f430 not found: ID does not exist" Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.634249 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Oct 11 05:06:47 crc kubenswrapper[4651]: E1011 05:06:47.634615 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f0c06fb-d21f-42e0-ad0d-a54ba7cea292" containerName="mariadb-database-create" Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.634638 4651 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="7f0c06fb-d21f-42e0-ad0d-a54ba7cea292" containerName="mariadb-database-create" Oct 11 05:06:47 crc kubenswrapper[4651]: E1011 05:06:47.634658 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="adb1281d-7bf8-4afa-ba64-989c39f857b2" containerName="dnsmasq-dns" Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.634666 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="adb1281d-7bf8-4afa-ba64-989c39f857b2" containerName="dnsmasq-dns" Oct 11 05:06:47 crc kubenswrapper[4651]: E1011 05:06:47.634696 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b502fe10-f784-49d8-8b56-b3384c15f4f7" containerName="mariadb-database-create" Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.634705 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="b502fe10-f784-49d8-8b56-b3384c15f4f7" containerName="mariadb-database-create" Oct 11 05:06:47 crc kubenswrapper[4651]: E1011 05:06:47.634723 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="adb1281d-7bf8-4afa-ba64-989c39f857b2" containerName="init" Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.634731 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="adb1281d-7bf8-4afa-ba64-989c39f857b2" containerName="init" Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.634935 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="b502fe10-f784-49d8-8b56-b3384c15f4f7" containerName="mariadb-database-create" Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.634969 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="adb1281d-7bf8-4afa-ba64-989c39f857b2" containerName="dnsmasq-dns" Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.634984 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f0c06fb-d21f-42e0-ad0d-a54ba7cea292" containerName="mariadb-database-create" Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.641353 4651 util.go:30] "No sandbox for pod can be found. 
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.643087 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf"
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.643531 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data"
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.644167 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-gnt9g"
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.644885 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files"
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.659543 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"]
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.791293 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/aef9d930-4287-490f-85c7-5a791f985a77-lock\") pod \"swift-storage-0\" (UID: \"aef9d930-4287-490f-85c7-5a791f985a77\") " pod="openstack/swift-storage-0"
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.792533 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/aef9d930-4287-490f-85c7-5a791f985a77-etc-swift\") pod \"swift-storage-0\" (UID: \"aef9d930-4287-490f-85c7-5a791f985a77\") " pod="openstack/swift-storage-0"
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.792566 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"aef9d930-4287-490f-85c7-5a791f985a77\") " pod="openstack/swift-storage-0"
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.792627 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/aef9d930-4287-490f-85c7-5a791f985a77-cache\") pod \"swift-storage-0\" (UID: \"aef9d930-4287-490f-85c7-5a791f985a77\") " pod="openstack/swift-storage-0"
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.792880 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dv9b7\" (UniqueName: \"kubernetes.io/projected/aef9d930-4287-490f-85c7-5a791f985a77-kube-api-access-dv9b7\") pod \"swift-storage-0\" (UID: \"aef9d930-4287-490f-85c7-5a791f985a77\") " pod="openstack/swift-storage-0"
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.881774 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="adb1281d-7bf8-4afa-ba64-989c39f857b2" path="/var/lib/kubelet/pods/adb1281d-7bf8-4afa-ba64-989c39f857b2/volumes"
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.895332 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/aef9d930-4287-490f-85c7-5a791f985a77-etc-swift\") pod \"swift-storage-0\" (UID: \"aef9d930-4287-490f-85c7-5a791f985a77\") " pod="openstack/swift-storage-0"
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.895509 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"aef9d930-4287-490f-85c7-5a791f985a77\") " pod="openstack/swift-storage-0"
Oct 11 05:06:47 crc kubenswrapper[4651]: E1011 05:06:47.895555 4651 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Oct 11 05:06:47 crc kubenswrapper[4651]: E1011 05:06:47.895584 4651 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.895616 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/aef9d930-4287-490f-85c7-5a791f985a77-cache\") pod \"swift-storage-0\" (UID: \"aef9d930-4287-490f-85c7-5a791f985a77\") " pod="openstack/swift-storage-0"
Oct 11 05:06:47 crc kubenswrapper[4651]: E1011 05:06:47.895643 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/aef9d930-4287-490f-85c7-5a791f985a77-etc-swift podName:aef9d930-4287-490f-85c7-5a791f985a77 nodeName:}" failed. No retries permitted until 2025-10-11 05:06:48.395620577 +0000 UTC m=+929.291853383 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/aef9d930-4287-490f-85c7-5a791f985a77-etc-swift") pod "swift-storage-0" (UID: "aef9d930-4287-490f-85c7-5a791f985a77") : configmap "swift-ring-files" not found
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.895770 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dv9b7\" (UniqueName: \"kubernetes.io/projected/aef9d930-4287-490f-85c7-5a791f985a77-kube-api-access-dv9b7\") pod \"swift-storage-0\" (UID: \"aef9d930-4287-490f-85c7-5a791f985a77\") " pod="openstack/swift-storage-0"
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.896505 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/aef9d930-4287-490f-85c7-5a791f985a77-lock\") pod \"swift-storage-0\" (UID: \"aef9d930-4287-490f-85c7-5a791f985a77\") " pod="openstack/swift-storage-0"
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.897030 4651 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"aef9d930-4287-490f-85c7-5a791f985a77\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/swift-storage-0"
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.897348 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/aef9d930-4287-490f-85c7-5a791f985a77-lock\") pod \"swift-storage-0\" (UID: \"aef9d930-4287-490f-85c7-5a791f985a77\") " pod="openstack/swift-storage-0"
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.897691 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/aef9d930-4287-490f-85c7-5a791f985a77-cache\") pod \"swift-storage-0\" (UID: \"aef9d930-4287-490f-85c7-5a791f985a77\") " pod="openstack/swift-storage-0"
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.923012 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dv9b7\" (UniqueName: \"kubernetes.io/projected/aef9d930-4287-490f-85c7-5a791f985a77-kube-api-access-dv9b7\") pod \"swift-storage-0\" (UID: \"aef9d930-4287-490f-85c7-5a791f985a77\") " pod="openstack/swift-storage-0"
Oct 11 05:06:47 crc kubenswrapper[4651]: I1011 05:06:47.925028 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"aef9d930-4287-490f-85c7-5a791f985a77\") " pod="openstack/swift-storage-0"
Oct 11 05:06:48 crc kubenswrapper[4651]: I1011 05:06:48.123203 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-l2gmr"]
Oct 11 05:06:48 crc kubenswrapper[4651]: I1011 05:06:48.124180 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-l2gmr"
Oct 11 05:06:48 crc kubenswrapper[4651]: I1011 05:06:48.125606 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data"
Oct 11 05:06:48 crc kubenswrapper[4651]: I1011 05:06:48.126962 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data"
Oct 11 05:06:48 crc kubenswrapper[4651]: I1011 05:06:48.127088 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts"
Oct 11 05:06:48 crc kubenswrapper[4651]: I1011 05:06:48.135740 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-l2gmr"]
Oct 11 05:06:48 crc kubenswrapper[4651]: I1011 05:06:48.305089 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/35bb96b9-93d2-4ab5-a102-11f093a29144-scripts\") pod \"swift-ring-rebalance-l2gmr\" (UID: \"35bb96b9-93d2-4ab5-a102-11f093a29144\") " pod="openstack/swift-ring-rebalance-l2gmr"
Oct 11 05:06:48 crc kubenswrapper[4651]: I1011 05:06:48.305191 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9kr6\" (UniqueName: \"kubernetes.io/projected/35bb96b9-93d2-4ab5-a102-11f093a29144-kube-api-access-t9kr6\") pod \"swift-ring-rebalance-l2gmr\" (UID: \"35bb96b9-93d2-4ab5-a102-11f093a29144\") " pod="openstack/swift-ring-rebalance-l2gmr"
Oct 11 05:06:48 crc kubenswrapper[4651]: I1011 05:06:48.305363 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/35bb96b9-93d2-4ab5-a102-11f093a29144-etc-swift\") pod \"swift-ring-rebalance-l2gmr\" (UID: \"35bb96b9-93d2-4ab5-a102-11f093a29144\") " pod="openstack/swift-ring-rebalance-l2gmr"
Oct 11 05:06:48 crc kubenswrapper[4651]: I1011 05:06:48.305508 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/35bb96b9-93d2-4ab5-a102-11f093a29144-dispersionconf\") pod \"swift-ring-rebalance-l2gmr\" (UID: \"35bb96b9-93d2-4ab5-a102-11f093a29144\") " pod="openstack/swift-ring-rebalance-l2gmr"
Oct 11 05:06:48 crc kubenswrapper[4651]: I1011 05:06:48.305727 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/35bb96b9-93d2-4ab5-a102-11f093a29144-swiftconf\") pod \"swift-ring-rebalance-l2gmr\" (UID: \"35bb96b9-93d2-4ab5-a102-11f093a29144\") " pod="openstack/swift-ring-rebalance-l2gmr"
Oct 11 05:06:48 crc kubenswrapper[4651]: I1011 05:06:48.305769 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/35bb96b9-93d2-4ab5-a102-11f093a29144-ring-data-devices\") pod \"swift-ring-rebalance-l2gmr\" (UID: \"35bb96b9-93d2-4ab5-a102-11f093a29144\") " pod="openstack/swift-ring-rebalance-l2gmr"
Oct 11 05:06:48 crc kubenswrapper[4651]: I1011 05:06:48.305800 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35bb96b9-93d2-4ab5-a102-11f093a29144-combined-ca-bundle\") pod \"swift-ring-rebalance-l2gmr\" (UID: \"35bb96b9-93d2-4ab5-a102-11f093a29144\") " pod="openstack/swift-ring-rebalance-l2gmr"
Oct 11 05:06:48 crc kubenswrapper[4651]: I1011 05:06:48.407863 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/35bb96b9-93d2-4ab5-a102-11f093a29144-etc-swift\") pod \"swift-ring-rebalance-l2gmr\" (UID: \"35bb96b9-93d2-4ab5-a102-11f093a29144\") " pod="openstack/swift-ring-rebalance-l2gmr"
Oct 11 05:06:48 crc kubenswrapper[4651]: I1011 05:06:48.407965 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/aef9d930-4287-490f-85c7-5a791f985a77-etc-swift\") pod \"swift-storage-0\" (UID: \"aef9d930-4287-490f-85c7-5a791f985a77\") " pod="openstack/swift-storage-0"
Oct 11 05:06:48 crc kubenswrapper[4651]: I1011 05:06:48.408014 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/35bb96b9-93d2-4ab5-a102-11f093a29144-dispersionconf\") pod \"swift-ring-rebalance-l2gmr\" (UID: \"35bb96b9-93d2-4ab5-a102-11f093a29144\") " pod="openstack/swift-ring-rebalance-l2gmr"
Oct 11 05:06:48 crc kubenswrapper[4651]: I1011 05:06:48.408126 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/35bb96b9-93d2-4ab5-a102-11f093a29144-swiftconf\") pod \"swift-ring-rebalance-l2gmr\" (UID: \"35bb96b9-93d2-4ab5-a102-11f093a29144\") " pod="openstack/swift-ring-rebalance-l2gmr"
Oct 11 05:06:48 crc kubenswrapper[4651]: I1011 05:06:48.408166 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/35bb96b9-93d2-4ab5-a102-11f093a29144-ring-data-devices\") pod \"swift-ring-rebalance-l2gmr\" (UID: \"35bb96b9-93d2-4ab5-a102-11f093a29144\") " pod="openstack/swift-ring-rebalance-l2gmr"
Oct 11 05:06:48 crc kubenswrapper[4651]: I1011 05:06:48.408201 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35bb96b9-93d2-4ab5-a102-11f093a29144-combined-ca-bundle\") pod \"swift-ring-rebalance-l2gmr\" (UID: \"35bb96b9-93d2-4ab5-a102-11f093a29144\") " pod="openstack/swift-ring-rebalance-l2gmr"
Oct 11 05:06:48 crc kubenswrapper[4651]: E1011 05:06:48.408220 4651 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Oct 11 05:06:48 crc kubenswrapper[4651]: I1011 05:06:48.408246 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/35bb96b9-93d2-4ab5-a102-11f093a29144-scripts\") pod \"swift-ring-rebalance-l2gmr\" (UID: \"35bb96b9-93d2-4ab5-a102-11f093a29144\") " pod="openstack/swift-ring-rebalance-l2gmr"
Oct 11 05:06:48 crc kubenswrapper[4651]: E1011 05:06:48.408254 4651 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Oct 11 05:06:48 crc kubenswrapper[4651]: E1011 05:06:48.408457 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/aef9d930-4287-490f-85c7-5a791f985a77-etc-swift podName:aef9d930-4287-490f-85c7-5a791f985a77 nodeName:}" failed. No retries permitted until 2025-10-11 05:06:49.408427045 +0000 UTC m=+930.304659861 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/aef9d930-4287-490f-85c7-5a791f985a77-etc-swift") pod "swift-storage-0" (UID: "aef9d930-4287-490f-85c7-5a791f985a77") : configmap "swift-ring-files" not found
Oct 11 05:06:48 crc kubenswrapper[4651]: I1011 05:06:48.408881 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9kr6\" (UniqueName: \"kubernetes.io/projected/35bb96b9-93d2-4ab5-a102-11f093a29144-kube-api-access-t9kr6\") pod \"swift-ring-rebalance-l2gmr\" (UID: \"35bb96b9-93d2-4ab5-a102-11f093a29144\") " pod="openstack/swift-ring-rebalance-l2gmr"
Oct 11 05:06:48 crc kubenswrapper[4651]: I1011 05:06:48.408972 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/35bb96b9-93d2-4ab5-a102-11f093a29144-etc-swift\") pod \"swift-ring-rebalance-l2gmr\" (UID: \"35bb96b9-93d2-4ab5-a102-11f093a29144\") " pod="openstack/swift-ring-rebalance-l2gmr"
Oct 11 05:06:48 crc kubenswrapper[4651]: I1011 05:06:48.409183 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/35bb96b9-93d2-4ab5-a102-11f093a29144-ring-data-devices\") pod \"swift-ring-rebalance-l2gmr\" (UID: \"35bb96b9-93d2-4ab5-a102-11f093a29144\") " pod="openstack/swift-ring-rebalance-l2gmr"
Oct 11 05:06:48 crc kubenswrapper[4651]: I1011 05:06:48.409447 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/35bb96b9-93d2-4ab5-a102-11f093a29144-scripts\") pod \"swift-ring-rebalance-l2gmr\" (UID: \"35bb96b9-93d2-4ab5-a102-11f093a29144\") " pod="openstack/swift-ring-rebalance-l2gmr"
Oct 11 05:06:48 crc kubenswrapper[4651]: I1011 05:06:48.412082 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/35bb96b9-93d2-4ab5-a102-11f093a29144-dispersionconf\") pod \"swift-ring-rebalance-l2gmr\" (UID: \"35bb96b9-93d2-4ab5-a102-11f093a29144\") " pod="openstack/swift-ring-rebalance-l2gmr"
Oct 11 05:06:48 crc kubenswrapper[4651]: I1011 05:06:48.413326 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/35bb96b9-93d2-4ab5-a102-11f093a29144-swiftconf\") pod \"swift-ring-rebalance-l2gmr\" (UID: \"35bb96b9-93d2-4ab5-a102-11f093a29144\") " pod="openstack/swift-ring-rebalance-l2gmr"
Oct 11 05:06:48 crc kubenswrapper[4651]: I1011 05:06:48.415144 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35bb96b9-93d2-4ab5-a102-11f093a29144-combined-ca-bundle\") pod \"swift-ring-rebalance-l2gmr\" (UID: \"35bb96b9-93d2-4ab5-a102-11f093a29144\") " pod="openstack/swift-ring-rebalance-l2gmr"
Oct 11 05:06:48 crc kubenswrapper[4651]: I1011 05:06:48.438517 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9kr6\" (UniqueName: \"kubernetes.io/projected/35bb96b9-93d2-4ab5-a102-11f093a29144-kube-api-access-t9kr6\") pod \"swift-ring-rebalance-l2gmr\" (UID: \"35bb96b9-93d2-4ab5-a102-11f093a29144\") " pod="openstack/swift-ring-rebalance-l2gmr"
Oct 11 05:06:48 crc kubenswrapper[4651]: I1011 05:06:48.444494 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-l2gmr"
Oct 11 05:06:48 crc kubenswrapper[4651]: I1011 05:06:48.785549 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-l2gmr"]
Oct 11 05:06:49 crc kubenswrapper[4651]: I1011 05:06:49.416918 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-8zck6"]
Oct 11 05:06:49 crc kubenswrapper[4651]: I1011 05:06:49.418195 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-8zck6"
Oct 11 05:06:49 crc kubenswrapper[4651]: I1011 05:06:49.427224 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/aef9d930-4287-490f-85c7-5a791f985a77-etc-swift\") pod \"swift-storage-0\" (UID: \"aef9d930-4287-490f-85c7-5a791f985a77\") " pod="openstack/swift-storage-0"
Oct 11 05:06:49 crc kubenswrapper[4651]: E1011 05:06:49.427428 4651 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Oct 11 05:06:49 crc kubenswrapper[4651]: E1011 05:06:49.427451 4651 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Oct 11 05:06:49 crc kubenswrapper[4651]: E1011 05:06:49.427510 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/aef9d930-4287-490f-85c7-5a791f985a77-etc-swift podName:aef9d930-4287-490f-85c7-5a791f985a77 nodeName:}" failed. No retries permitted until 2025-10-11 05:06:51.427495717 +0000 UTC m=+932.323728513 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/aef9d930-4287-490f-85c7-5a791f985a77-etc-swift") pod "swift-storage-0" (UID: "aef9d930-4287-490f-85c7-5a791f985a77") : configmap "swift-ring-files" not found
Oct 11 05:06:49 crc kubenswrapper[4651]: I1011 05:06:49.427864 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-8zck6"]
Oct 11 05:06:49 crc kubenswrapper[4651]: I1011 05:06:49.529157 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pw7fp\" (UniqueName: \"kubernetes.io/projected/7890e4dd-435a-4c55-85a0-71bd4ccac0a6-kube-api-access-pw7fp\") pod \"glance-db-create-8zck6\" (UID: \"7890e4dd-435a-4c55-85a0-71bd4ccac0a6\") " pod="openstack/glance-db-create-8zck6"
Oct 11 05:06:49 crc kubenswrapper[4651]: I1011 05:06:49.542330 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-l2gmr" event={"ID":"35bb96b9-93d2-4ab5-a102-11f093a29144","Type":"ContainerStarted","Data":"07576f478fd72949ec766c230f74ad425195287e15428fff81dc7552ddbfb82d"}
Oct 11 05:06:49 crc kubenswrapper[4651]: I1011 05:06:49.630915 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pw7fp\" (UniqueName: \"kubernetes.io/projected/7890e4dd-435a-4c55-85a0-71bd4ccac0a6-kube-api-access-pw7fp\") pod \"glance-db-create-8zck6\" (UID: \"7890e4dd-435a-4c55-85a0-71bd4ccac0a6\") " pod="openstack/glance-db-create-8zck6"
Oct 11 05:06:49 crc kubenswrapper[4651]: I1011 05:06:49.655503 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pw7fp\" (UniqueName: \"kubernetes.io/projected/7890e4dd-435a-4c55-85a0-71bd4ccac0a6-kube-api-access-pw7fp\") pod \"glance-db-create-8zck6\" (UID: \"7890e4dd-435a-4c55-85a0-71bd4ccac0a6\") " pod="openstack/glance-db-create-8zck6"
Oct 11 05:06:49 crc kubenswrapper[4651]: I1011 05:06:49.738004 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-8zck6"
Oct 11 05:06:50 crc kubenswrapper[4651]: I1011 05:06:50.214064 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-8zck6"]
Oct 11 05:06:50 crc kubenswrapper[4651]: I1011 05:06:50.552795 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-8zck6" event={"ID":"7890e4dd-435a-4c55-85a0-71bd4ccac0a6","Type":"ContainerStarted","Data":"f324f7276a0ba5ca05ddf5c2b11b2a06e2c2bb368ed6aebfee867c8ee5cfcd37"}
Oct 11 05:06:51 crc kubenswrapper[4651]: I1011 05:06:51.474134 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/aef9d930-4287-490f-85c7-5a791f985a77-etc-swift\") pod \"swift-storage-0\" (UID: \"aef9d930-4287-490f-85c7-5a791f985a77\") " pod="openstack/swift-storage-0"
Oct 11 05:06:51 crc kubenswrapper[4651]: E1011 05:06:51.474309 4651 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Oct 11 05:06:51 crc kubenswrapper[4651]: E1011 05:06:51.474534 4651 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Oct 11 05:06:51 crc kubenswrapper[4651]: E1011 05:06:51.474584 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/aef9d930-4287-490f-85c7-5a791f985a77-etc-swift podName:aef9d930-4287-490f-85c7-5a791f985a77 nodeName:}" failed. No retries permitted until 2025-10-11 05:06:55.474567908 +0000 UTC m=+936.370800704 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/aef9d930-4287-490f-85c7-5a791f985a77-etc-swift") pod "swift-storage-0" (UID: "aef9d930-4287-490f-85c7-5a791f985a77") : configmap "swift-ring-files" not found
Oct 11 05:06:53 crc kubenswrapper[4651]: I1011 05:06:53.577622 4651 generic.go:334] "Generic (PLEG): container finished" podID="64f28073-6c52-4fe5-8474-f976cd26edc0" containerID="d7aa98064ec9b79c4b78e88a94d3b9f892700455b99c5e666b6087eed94f4fcb" exitCode=0
Oct 11 05:06:53 crc kubenswrapper[4651]: I1011 05:06:53.578112 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-zvr5f" event={"ID":"64f28073-6c52-4fe5-8474-f976cd26edc0","Type":"ContainerDied","Data":"d7aa98064ec9b79c4b78e88a94d3b9f892700455b99c5e666b6087eed94f4fcb"}
Oct 11 05:06:53 crc kubenswrapper[4651]: I1011 05:06:53.581034 4651 generic.go:334] "Generic (PLEG): container finished" podID="7890e4dd-435a-4c55-85a0-71bd4ccac0a6" containerID="a4992c024d0d86a07ee71184074329012f128a9e5697a8fbf9b50d02ed033b57" exitCode=0
Oct 11 05:06:53 crc kubenswrapper[4651]: I1011 05:06:53.581107 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-8zck6" event={"ID":"7890e4dd-435a-4c55-85a0-71bd4ccac0a6","Type":"ContainerDied","Data":"a4992c024d0d86a07ee71184074329012f128a9e5697a8fbf9b50d02ed033b57"}
Oct 11 05:06:53 crc kubenswrapper[4651]: I1011 05:06:53.937762 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-a6cc-account-create-flc4v"]
Oct 11 05:06:53 crc kubenswrapper[4651]: I1011 05:06:53.939106 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-a6cc-account-create-flc4v"
Oct 11 05:06:53 crc kubenswrapper[4651]: I1011 05:06:53.940794 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret"
Oct 11 05:06:53 crc kubenswrapper[4651]: I1011 05:06:53.978956 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-a6cc-account-create-flc4v"]
Oct 11 05:06:54 crc kubenswrapper[4651]: I1011 05:06:54.114430 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvdhz\" (UniqueName: \"kubernetes.io/projected/b6bfca2f-54e4-46e5-9c51-29f08bc730d1-kube-api-access-kvdhz\") pod \"keystone-a6cc-account-create-flc4v\" (UID: \"b6bfca2f-54e4-46e5-9c51-29f08bc730d1\") " pod="openstack/keystone-a6cc-account-create-flc4v"
Oct 11 05:06:54 crc kubenswrapper[4651]: I1011 05:06:54.216558 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvdhz\" (UniqueName: \"kubernetes.io/projected/b6bfca2f-54e4-46e5-9c51-29f08bc730d1-kube-api-access-kvdhz\") pod \"keystone-a6cc-account-create-flc4v\" (UID: \"b6bfca2f-54e4-46e5-9c51-29f08bc730d1\") " pod="openstack/keystone-a6cc-account-create-flc4v"
Oct 11 05:06:54 crc kubenswrapper[4651]: I1011 05:06:54.235150 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvdhz\" (UniqueName: \"kubernetes.io/projected/b6bfca2f-54e4-46e5-9c51-29f08bc730d1-kube-api-access-kvdhz\") pod \"keystone-a6cc-account-create-flc4v\" (UID: \"b6bfca2f-54e4-46e5-9c51-29f08bc730d1\") " pod="openstack/keystone-a6cc-account-create-flc4v"
Oct 11 05:06:54 crc kubenswrapper[4651]: I1011 05:06:54.270627 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-a6cc-account-create-flc4v"
Oct 11 05:06:54 crc kubenswrapper[4651]: I1011 05:06:54.312701 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-c468-account-create-zcb7t"]
Oct 11 05:06:54 crc kubenswrapper[4651]: I1011 05:06:54.313707 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-c468-account-create-zcb7t"
Oct 11 05:06:54 crc kubenswrapper[4651]: I1011 05:06:54.318400 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret"
Oct 11 05:06:54 crc kubenswrapper[4651]: I1011 05:06:54.324080 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-c468-account-create-zcb7t"]
Oct 11 05:06:54 crc kubenswrapper[4651]: I1011 05:06:54.420910 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckrbd\" (UniqueName: \"kubernetes.io/projected/1898b88c-a558-4880-9502-11e9175555ea-kube-api-access-ckrbd\") pod \"placement-c468-account-create-zcb7t\" (UID: \"1898b88c-a558-4880-9502-11e9175555ea\") " pod="openstack/placement-c468-account-create-zcb7t"
Oct 11 05:06:54 crc kubenswrapper[4651]: I1011 05:06:54.522504 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ckrbd\" (UniqueName: \"kubernetes.io/projected/1898b88c-a558-4880-9502-11e9175555ea-kube-api-access-ckrbd\") pod \"placement-c468-account-create-zcb7t\" (UID: \"1898b88c-a558-4880-9502-11e9175555ea\") " pod="openstack/placement-c468-account-create-zcb7t"
Oct 11 05:06:54 crc kubenswrapper[4651]: I1011 05:06:54.540617 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckrbd\" (UniqueName: \"kubernetes.io/projected/1898b88c-a558-4880-9502-11e9175555ea-kube-api-access-ckrbd\") pod \"placement-c468-account-create-zcb7t\" (UID: \"1898b88c-a558-4880-9502-11e9175555ea\") " pod="openstack/placement-c468-account-create-zcb7t"
Oct 11 05:06:54 crc kubenswrapper[4651]: I1011 05:06:54.592043 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-zvr5f" event={"ID":"64f28073-6c52-4fe5-8474-f976cd26edc0","Type":"ContainerStarted","Data":"f3a6047fe5e61fedfb482f252d75988656a6f622f04e5734a0b333c33ab8319c"}
Oct 11 05:06:54 crc kubenswrapper[4651]: I1011 05:06:54.592108 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b8fbc5445-zvr5f"
Oct 11 05:06:54 crc kubenswrapper[4651]: I1011 05:06:54.644191 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-c468-account-create-zcb7t"
Oct 11 05:06:55 crc kubenswrapper[4651]: I1011 05:06:55.536565 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/aef9d930-4287-490f-85c7-5a791f985a77-etc-swift\") pod \"swift-storage-0\" (UID: \"aef9d930-4287-490f-85c7-5a791f985a77\") " pod="openstack/swift-storage-0"
Oct 11 05:06:55 crc kubenswrapper[4651]: E1011 05:06:55.537151 4651 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Oct 11 05:06:55 crc kubenswrapper[4651]: E1011 05:06:55.537206 4651 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Oct 11 05:06:55 crc kubenswrapper[4651]: E1011 05:06:55.537304 4651 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/aef9d930-4287-490f-85c7-5a791f985a77-etc-swift podName:aef9d930-4287-490f-85c7-5a791f985a77 nodeName:}" failed. No retries permitted until 2025-10-11 05:07:03.537264467 +0000 UTC m=+944.433497293 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/aef9d930-4287-490f-85c7-5a791f985a77-etc-swift") pod "swift-storage-0" (UID: "aef9d930-4287-490f-85c7-5a791f985a77") : configmap "swift-ring-files" not found
Oct 11 05:06:55 crc kubenswrapper[4651]: I1011 05:06:55.539028 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0"
Oct 11 05:06:55 crc kubenswrapper[4651]: I1011 05:06:55.567577 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b8fbc5445-zvr5f" podStartSLOduration=9.567556448 podStartE2EDuration="9.567556448s" podCreationTimestamp="2025-10-11 05:06:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:06:54.609230123 +0000 UTC m=+935.505462919" watchObservedRunningTime="2025-10-11 05:06:55.567556448 +0000 UTC m=+936.463789244"
Oct 11 05:06:56 crc kubenswrapper[4651]: I1011 05:06:56.315785 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-8zck6"
Oct 11 05:06:56 crc kubenswrapper[4651]: I1011 05:06:56.454534 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pw7fp\" (UniqueName: \"kubernetes.io/projected/7890e4dd-435a-4c55-85a0-71bd4ccac0a6-kube-api-access-pw7fp\") pod \"7890e4dd-435a-4c55-85a0-71bd4ccac0a6\" (UID: \"7890e4dd-435a-4c55-85a0-71bd4ccac0a6\") "
Oct 11 05:06:56 crc kubenswrapper[4651]: I1011 05:06:56.460961 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7890e4dd-435a-4c55-85a0-71bd4ccac0a6-kube-api-access-pw7fp" (OuterVolumeSpecName: "kube-api-access-pw7fp") pod "7890e4dd-435a-4c55-85a0-71bd4ccac0a6" (UID: "7890e4dd-435a-4c55-85a0-71bd4ccac0a6"). InnerVolumeSpecName "kube-api-access-pw7fp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:06:56 crc kubenswrapper[4651]: I1011 05:06:56.556297 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pw7fp\" (UniqueName: \"kubernetes.io/projected/7890e4dd-435a-4c55-85a0-71bd4ccac0a6-kube-api-access-pw7fp\") on node \"crc\" DevicePath \"\""
Oct 11 05:06:56 crc kubenswrapper[4651]: I1011 05:06:56.614737 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-8zck6"
Oct 11 05:06:56 crc kubenswrapper[4651]: I1011 05:06:56.615312 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-8zck6" event={"ID":"7890e4dd-435a-4c55-85a0-71bd4ccac0a6","Type":"ContainerDied","Data":"f324f7276a0ba5ca05ddf5c2b11b2a06e2c2bb368ed6aebfee867c8ee5cfcd37"}
Oct 11 05:06:56 crc kubenswrapper[4651]: I1011 05:06:56.615352 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f324f7276a0ba5ca05ddf5c2b11b2a06e2c2bb368ed6aebfee867c8ee5cfcd37"
Oct 11 05:06:56 crc kubenswrapper[4651]: I1011 05:06:56.622777 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-l2gmr" event={"ID":"35bb96b9-93d2-4ab5-a102-11f093a29144","Type":"ContainerStarted","Data":"6c3a4e4cba9f1e91fdf2cf55b1c86ae21afe1c7649e4ab24dd7dbbbc8f462c9b"}
Oct 11 05:06:56 crc kubenswrapper[4651]: I1011 05:06:56.650566 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-a6cc-account-create-flc4v"]
Oct 11 05:06:56 crc kubenswrapper[4651]: I1011 05:06:56.654434 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-l2gmr" podStartSLOduration=1.26413443 podStartE2EDuration="8.654416965s" podCreationTimestamp="2025-10-11 05:06:48 +0000 UTC" firstStartedPulling="2025-10-11 05:06:48.8055156 +0000 UTC m=+929.701748396" lastFinishedPulling="2025-10-11 05:06:56.195798125 +0000 UTC m=+937.092030931" observedRunningTime="2025-10-11 05:06:56.647069298 +0000 UTC m=+937.543302094" watchObservedRunningTime="2025-10-11 05:06:56.654416965 +0000 UTC m=+937.550649761"
Oct 11 05:06:56 crc kubenswrapper[4651]: I1011 05:06:56.685974 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-c468-account-create-zcb7t"]
Oct 11 05:06:56 crc kubenswrapper[4651]: W1011 05:06:56.689022 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1898b88c_a558_4880_9502_11e9175555ea.slice/crio-ded0b01323e0093ed5acd3f0f007e1b5fa41275de0753fbd43eafd95d23a4593 WatchSource:0}: Error finding container ded0b01323e0093ed5acd3f0f007e1b5fa41275de0753fbd43eafd95d23a4593: Status 404 returned error can't find the container with id ded0b01323e0093ed5acd3f0f007e1b5fa41275de0753fbd43eafd95d23a4593
Oct 11 05:06:57 crc kubenswrapper[4651]: I1011 05:06:57.662466 4651 generic.go:334] "Generic (PLEG): container finished" podID="1898b88c-a558-4880-9502-11e9175555ea" containerID="1c0792869a03f977cada24728a5defddd81033af56785de4c6b4e4194e821447" exitCode=0
Oct 11 05:06:57 crc kubenswrapper[4651]: I1011 05:06:57.662531 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-c468-account-create-zcb7t" event={"ID":"1898b88c-a558-4880-9502-11e9175555ea","Type":"ContainerDied","Data":"1c0792869a03f977cada24728a5defddd81033af56785de4c6b4e4194e821447"}
Oct 11 05:06:57 crc kubenswrapper[4651]: I1011 05:06:57.662558 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-c468-account-create-zcb7t" event={"ID":"1898b88c-a558-4880-9502-11e9175555ea","Type":"ContainerStarted","Data":"ded0b01323e0093ed5acd3f0f007e1b5fa41275de0753fbd43eafd95d23a4593"}
Oct 11 05:06:57 crc kubenswrapper[4651]: I1011 05:06:57.665002 4651 generic.go:334] "Generic (PLEG): container finished" podID="b6bfca2f-54e4-46e5-9c51-29f08bc730d1" containerID="811c4dd8df694f8ba0319b1f3d9bd303bb9c090463c57a39f44149b08756bb28" exitCode=0
Oct 11 05:06:57 crc kubenswrapper[4651]: I1011 05:06:57.665905 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-a6cc-account-create-flc4v" event={"ID":"b6bfca2f-54e4-46e5-9c51-29f08bc730d1","Type":"ContainerDied","Data":"811c4dd8df694f8ba0319b1f3d9bd303bb9c090463c57a39f44149b08756bb28"}
Oct 11 05:06:57 crc kubenswrapper[4651]: I1011 05:06:57.665927 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-a6cc-account-create-flc4v" event={"ID":"b6bfca2f-54e4-46e5-9c51-29f08bc730d1","Type":"ContainerStarted","Data":"0860b950cfd54072142e3f2657cb9cf3e44091b4a17548765b209df8ec5409c9"}
Oct 11 05:06:59 crc kubenswrapper[4651]: I1011 05:06:59.098137 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-a6cc-account-create-flc4v"
Oct 11 05:06:59 crc kubenswrapper[4651]: I1011 05:06:59.103268 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-c468-account-create-zcb7t"
Oct 11 05:06:59 crc kubenswrapper[4651]: I1011 05:06:59.210543 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ckrbd\" (UniqueName: \"kubernetes.io/projected/1898b88c-a558-4880-9502-11e9175555ea-kube-api-access-ckrbd\") pod \"1898b88c-a558-4880-9502-11e9175555ea\" (UID: \"1898b88c-a558-4880-9502-11e9175555ea\") "
Oct 11 05:06:59 crc kubenswrapper[4651]: I1011 05:06:59.210982 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvdhz\" (UniqueName: \"kubernetes.io/projected/b6bfca2f-54e4-46e5-9c51-29f08bc730d1-kube-api-access-kvdhz\") pod \"b6bfca2f-54e4-46e5-9c51-29f08bc730d1\" (UID: \"b6bfca2f-54e4-46e5-9c51-29f08bc730d1\") "
Oct 11 05:06:59 crc kubenswrapper[4651]: I1011 05:06:59.216713 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1898b88c-a558-4880-9502-11e9175555ea-kube-api-access-ckrbd" (OuterVolumeSpecName: "kube-api-access-ckrbd") pod "1898b88c-a558-4880-9502-11e9175555ea" (UID: "1898b88c-a558-4880-9502-11e9175555ea"). InnerVolumeSpecName "kube-api-access-ckrbd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:06:59 crc kubenswrapper[4651]: I1011 05:06:59.216989 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6bfca2f-54e4-46e5-9c51-29f08bc730d1-kube-api-access-kvdhz" (OuterVolumeSpecName: "kube-api-access-kvdhz") pod "b6bfca2f-54e4-46e5-9c51-29f08bc730d1" (UID: "b6bfca2f-54e4-46e5-9c51-29f08bc730d1"). InnerVolumeSpecName "kube-api-access-kvdhz". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:06:59 crc kubenswrapper[4651]: I1011 05:06:59.316145 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ckrbd\" (UniqueName: \"kubernetes.io/projected/1898b88c-a558-4880-9502-11e9175555ea-kube-api-access-ckrbd\") on node \"crc\" DevicePath \"\"" Oct 11 05:06:59 crc kubenswrapper[4651]: I1011 05:06:59.316195 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvdhz\" (UniqueName: \"kubernetes.io/projected/b6bfca2f-54e4-46e5-9c51-29f08bc730d1-kube-api-access-kvdhz\") on node \"crc\" DevicePath \"\"" Oct 11 05:06:59 crc kubenswrapper[4651]: I1011 05:06:59.550418 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-1187-account-create-pmkbh"] Oct 11 05:06:59 crc kubenswrapper[4651]: E1011 05:06:59.550746 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6bfca2f-54e4-46e5-9c51-29f08bc730d1" containerName="mariadb-account-create" Oct 11 05:06:59 crc kubenswrapper[4651]: I1011 05:06:59.550763 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6bfca2f-54e4-46e5-9c51-29f08bc730d1" containerName="mariadb-account-create" Oct 11 05:06:59 crc kubenswrapper[4651]: E1011 05:06:59.550780 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1898b88c-a558-4880-9502-11e9175555ea" containerName="mariadb-account-create" Oct 11 05:06:59 crc kubenswrapper[4651]: I1011 05:06:59.550788 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="1898b88c-a558-4880-9502-11e9175555ea" containerName="mariadb-account-create" Oct 11 05:06:59 crc kubenswrapper[4651]: E1011 05:06:59.550809 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7890e4dd-435a-4c55-85a0-71bd4ccac0a6" containerName="mariadb-database-create" Oct 11 05:06:59 crc kubenswrapper[4651]: I1011 05:06:59.550833 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="7890e4dd-435a-4c55-85a0-71bd4ccac0a6" containerName="mariadb-database-create" Oct 11 05:06:59 crc kubenswrapper[4651]: I1011 05:06:59.550969 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="7890e4dd-435a-4c55-85a0-71bd4ccac0a6" containerName="mariadb-database-create" Oct 11 05:06:59 crc kubenswrapper[4651]: I1011 05:06:59.550980 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="1898b88c-a558-4880-9502-11e9175555ea" containerName="mariadb-account-create" Oct 11 05:06:59 crc kubenswrapper[4651]: I1011 05:06:59.551014 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6bfca2f-54e4-46e5-9c51-29f08bc730d1" containerName="mariadb-account-create" Oct 11 05:06:59 crc kubenswrapper[4651]: I1011 05:06:59.551496 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-1187-account-create-pmkbh" Oct 11 05:06:59 crc kubenswrapper[4651]: I1011 05:06:59.554427 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Oct 11 05:06:59 crc kubenswrapper[4651]: I1011 05:06:59.559202 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-1187-account-create-pmkbh"] Oct 11 05:06:59 crc kubenswrapper[4651]: I1011 05:06:59.679458 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-a6cc-account-create-flc4v" Oct 11 05:06:59 crc kubenswrapper[4651]: I1011 05:06:59.679448 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-a6cc-account-create-flc4v" event={"ID":"b6bfca2f-54e4-46e5-9c51-29f08bc730d1","Type":"ContainerDied","Data":"0860b950cfd54072142e3f2657cb9cf3e44091b4a17548765b209df8ec5409c9"} Oct 11 05:06:59 crc kubenswrapper[4651]: I1011 05:06:59.679618 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0860b950cfd54072142e3f2657cb9cf3e44091b4a17548765b209df8ec5409c9" Oct 11 05:06:59 crc kubenswrapper[4651]: I1011 05:06:59.681495 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-c468-account-create-zcb7t" event={"ID":"1898b88c-a558-4880-9502-11e9175555ea","Type":"ContainerDied","Data":"ded0b01323e0093ed5acd3f0f007e1b5fa41275de0753fbd43eafd95d23a4593"} Oct 11 05:06:59 crc kubenswrapper[4651]: I1011 05:06:59.681530 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ded0b01323e0093ed5acd3f0f007e1b5fa41275de0753fbd43eafd95d23a4593" Oct 11 05:06:59 crc kubenswrapper[4651]: I1011 05:06:59.681538 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-c468-account-create-zcb7t" Oct 11 05:06:59 crc kubenswrapper[4651]: I1011 05:06:59.722367 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2j26\" (UniqueName: \"kubernetes.io/projected/fa772ad4-2dcf-4af4-9cea-a0c7cb57557d-kube-api-access-h2j26\") pod \"glance-1187-account-create-pmkbh\" (UID: \"fa772ad4-2dcf-4af4-9cea-a0c7cb57557d\") " pod="openstack/glance-1187-account-create-pmkbh" Oct 11 05:06:59 crc kubenswrapper[4651]: I1011 05:06:59.823880 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2j26\" (UniqueName: \"kubernetes.io/projected/fa772ad4-2dcf-4af4-9cea-a0c7cb57557d-kube-api-access-h2j26\") pod \"glance-1187-account-create-pmkbh\" (UID: \"fa772ad4-2dcf-4af4-9cea-a0c7cb57557d\") " pod="openstack/glance-1187-account-create-pmkbh" Oct 11 05:06:59 crc kubenswrapper[4651]: I1011 05:06:59.839990 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2j26\" (UniqueName: \"kubernetes.io/projected/fa772ad4-2dcf-4af4-9cea-a0c7cb57557d-kube-api-access-h2j26\") pod \"glance-1187-account-create-pmkbh\" (UID: \"fa772ad4-2dcf-4af4-9cea-a0c7cb57557d\") " pod="openstack/glance-1187-account-create-pmkbh" Oct 11 05:06:59 crc kubenswrapper[4651]: I1011 05:06:59.867045 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-1187-account-create-pmkbh" Oct 11 05:07:00 crc kubenswrapper[4651]: I1011 05:07:00.332332 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-1187-account-create-pmkbh"] Oct 11 05:07:00 crc kubenswrapper[4651]: W1011 05:07:00.355576 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfa772ad4_2dcf_4af4_9cea_a0c7cb57557d.slice/crio-0771427f79f3751a4c0787cd541a4b4c8794100b828bd3d1e85163b719adc41f WatchSource:0}: Error finding container 0771427f79f3751a4c0787cd541a4b4c8794100b828bd3d1e85163b719adc41f: Status 404 returned error can't find the container with id 0771427f79f3751a4c0787cd541a4b4c8794100b828bd3d1e85163b719adc41f Oct 11 05:07:00 crc kubenswrapper[4651]: I1011 05:07:00.689674 4651 generic.go:334] "Generic (PLEG): container finished" podID="fa772ad4-2dcf-4af4-9cea-a0c7cb57557d" containerID="55b85f7500414ae562749bb5078422e7bb4280994c7a6aa499d10dbf080111cb" exitCode=0 Oct 11 05:07:00 crc kubenswrapper[4651]: I1011 05:07:00.689720 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-1187-account-create-pmkbh" event={"ID":"fa772ad4-2dcf-4af4-9cea-a0c7cb57557d","Type":"ContainerDied","Data":"55b85f7500414ae562749bb5078422e7bb4280994c7a6aa499d10dbf080111cb"} Oct 11 05:07:00 crc kubenswrapper[4651]: I1011 05:07:00.689751 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-1187-account-create-pmkbh" event={"ID":"fa772ad4-2dcf-4af4-9cea-a0c7cb57557d","Type":"ContainerStarted","Data":"0771427f79f3751a4c0787cd541a4b4c8794100b828bd3d1e85163b719adc41f"} Oct 11 05:07:01 crc kubenswrapper[4651]: I1011 05:07:01.805531 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b8fbc5445-zvr5f" Oct 11 05:07:01 crc kubenswrapper[4651]: I1011 05:07:01.874482 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-89rdc"] Oct 11 05:07:01 crc kubenswrapper[4651]: I1011 05:07:01.874949 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-89rdc" podUID="ca84ab98-19fd-4cbf-ab28-acb4fced285f" containerName="dnsmasq-dns" containerID="cri-o://f938731219facf4908fc2a256b9eb9885190a59cfe0e357682dfd2ad6d729076" gracePeriod=10 Oct 11 05:07:02 crc kubenswrapper[4651]: I1011 05:07:02.168520 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-1187-account-create-pmkbh" Oct 11 05:07:02 crc kubenswrapper[4651]: I1011 05:07:02.273153 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h2j26\" (UniqueName: \"kubernetes.io/projected/fa772ad4-2dcf-4af4-9cea-a0c7cb57557d-kube-api-access-h2j26\") pod \"fa772ad4-2dcf-4af4-9cea-a0c7cb57557d\" (UID: \"fa772ad4-2dcf-4af4-9cea-a0c7cb57557d\") " Oct 11 05:07:02 crc kubenswrapper[4651]: I1011 05:07:02.280878 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa772ad4-2dcf-4af4-9cea-a0c7cb57557d-kube-api-access-h2j26" (OuterVolumeSpecName: "kube-api-access-h2j26") pod "fa772ad4-2dcf-4af4-9cea-a0c7cb57557d" (UID: "fa772ad4-2dcf-4af4-9cea-a0c7cb57557d"). InnerVolumeSpecName "kube-api-access-h2j26". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:07:02 crc kubenswrapper[4651]: I1011 05:07:02.388748 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h2j26\" (UniqueName: \"kubernetes.io/projected/fa772ad4-2dcf-4af4-9cea-a0c7cb57557d-kube-api-access-h2j26\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:02 crc kubenswrapper[4651]: I1011 05:07:02.493551 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-89rdc" Oct 11 05:07:02 crc kubenswrapper[4651]: I1011 05:07:02.597251 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca84ab98-19fd-4cbf-ab28-acb4fced285f-config\") pod \"ca84ab98-19fd-4cbf-ab28-acb4fced285f\" (UID: \"ca84ab98-19fd-4cbf-ab28-acb4fced285f\") " Oct 11 05:07:02 crc kubenswrapper[4651]: I1011 05:07:02.597550 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vnw9t\" (UniqueName: \"kubernetes.io/projected/ca84ab98-19fd-4cbf-ab28-acb4fced285f-kube-api-access-vnw9t\") pod \"ca84ab98-19fd-4cbf-ab28-acb4fced285f\" (UID: \"ca84ab98-19fd-4cbf-ab28-acb4fced285f\") " Oct 11 05:07:02 crc kubenswrapper[4651]: I1011 05:07:02.597695 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ca84ab98-19fd-4cbf-ab28-acb4fced285f-dns-svc\") pod \"ca84ab98-19fd-4cbf-ab28-acb4fced285f\" (UID: \"ca84ab98-19fd-4cbf-ab28-acb4fced285f\") " Oct 11 05:07:02 crc kubenswrapper[4651]: I1011 05:07:02.601511 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca84ab98-19fd-4cbf-ab28-acb4fced285f-kube-api-access-vnw9t" (OuterVolumeSpecName: "kube-api-access-vnw9t") pod "ca84ab98-19fd-4cbf-ab28-acb4fced285f" (UID: "ca84ab98-19fd-4cbf-ab28-acb4fced285f"). InnerVolumeSpecName "kube-api-access-vnw9t". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:07:02 crc kubenswrapper[4651]: I1011 05:07:02.632850 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ca84ab98-19fd-4cbf-ab28-acb4fced285f-config" (OuterVolumeSpecName: "config") pod "ca84ab98-19fd-4cbf-ab28-acb4fced285f" (UID: "ca84ab98-19fd-4cbf-ab28-acb4fced285f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:07:02 crc kubenswrapper[4651]: I1011 05:07:02.640245 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ca84ab98-19fd-4cbf-ab28-acb4fced285f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ca84ab98-19fd-4cbf-ab28-acb4fced285f" (UID: "ca84ab98-19fd-4cbf-ab28-acb4fced285f"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:07:02 crc kubenswrapper[4651]: I1011 05:07:02.699885 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vnw9t\" (UniqueName: \"kubernetes.io/projected/ca84ab98-19fd-4cbf-ab28-acb4fced285f-kube-api-access-vnw9t\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:02 crc kubenswrapper[4651]: I1011 05:07:02.699909 4651 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ca84ab98-19fd-4cbf-ab28-acb4fced285f-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:02 crc kubenswrapper[4651]: I1011 05:07:02.699920 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca84ab98-19fd-4cbf-ab28-acb4fced285f-config\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:02 crc kubenswrapper[4651]: I1011 05:07:02.708105 4651 generic.go:334] "Generic (PLEG): container finished" podID="ca84ab98-19fd-4cbf-ab28-acb4fced285f" containerID="f938731219facf4908fc2a256b9eb9885190a59cfe0e357682dfd2ad6d729076" exitCode=0 Oct 11 05:07:02 crc kubenswrapper[4651]: I1011 05:07:02.708231 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-89rdc" Oct 11 05:07:02 crc kubenswrapper[4651]: I1011 05:07:02.708543 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-89rdc" event={"ID":"ca84ab98-19fd-4cbf-ab28-acb4fced285f","Type":"ContainerDied","Data":"f938731219facf4908fc2a256b9eb9885190a59cfe0e357682dfd2ad6d729076"} Oct 11 05:07:02 crc kubenswrapper[4651]: I1011 05:07:02.708689 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-89rdc" event={"ID":"ca84ab98-19fd-4cbf-ab28-acb4fced285f","Type":"ContainerDied","Data":"a432887df2c9254a282706c7fa850b5ecd59398e60baf51fd3a84d8a8551f0bf"} Oct 11 05:07:02 crc kubenswrapper[4651]: I1011 05:07:02.708768 4651 scope.go:117] "RemoveContainer" containerID="f938731219facf4908fc2a256b9eb9885190a59cfe0e357682dfd2ad6d729076" Oct 11 05:07:02 crc kubenswrapper[4651]: I1011 05:07:02.711054 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-1187-account-create-pmkbh" event={"ID":"fa772ad4-2dcf-4af4-9cea-a0c7cb57557d","Type":"ContainerDied","Data":"0771427f79f3751a4c0787cd541a4b4c8794100b828bd3d1e85163b719adc41f"} Oct 11 05:07:02 crc kubenswrapper[4651]: I1011 05:07:02.711166 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0771427f79f3751a4c0787cd541a4b4c8794100b828bd3d1e85163b719adc41f" Oct 11 05:07:02 crc kubenswrapper[4651]: I1011 05:07:02.712519 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-1187-account-create-pmkbh" Oct 11 05:07:02 crc kubenswrapper[4651]: I1011 05:07:02.742005 4651 scope.go:117] "RemoveContainer" containerID="2834835156f8ea76962c6759020fe737f7f6a025b380526fc167680cb68c1799" Oct 11 05:07:02 crc kubenswrapper[4651]: I1011 05:07:02.750648 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-89rdc"] Oct 11 05:07:02 crc kubenswrapper[4651]: I1011 05:07:02.757633 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-89rdc"] Oct 11 05:07:02 crc kubenswrapper[4651]: I1011 05:07:02.777717 4651 scope.go:117] "RemoveContainer" containerID="f938731219facf4908fc2a256b9eb9885190a59cfe0e357682dfd2ad6d729076" Oct 11 05:07:02 crc kubenswrapper[4651]: E1011 05:07:02.778231 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f938731219facf4908fc2a256b9eb9885190a59cfe0e357682dfd2ad6d729076\": container with ID starting with f938731219facf4908fc2a256b9eb9885190a59cfe0e357682dfd2ad6d729076 not found: ID does not exist" containerID="f938731219facf4908fc2a256b9eb9885190a59cfe0e357682dfd2ad6d729076" Oct 11 05:07:02 crc kubenswrapper[4651]: I1011 05:07:02.778267 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f938731219facf4908fc2a256b9eb9885190a59cfe0e357682dfd2ad6d729076"} err="failed to get container status \"f938731219facf4908fc2a256b9eb9885190a59cfe0e357682dfd2ad6d729076\": rpc error: code = NotFound desc = could not find container \"f938731219facf4908fc2a256b9eb9885190a59cfe0e357682dfd2ad6d729076\": container with ID starting with f938731219facf4908fc2a256b9eb9885190a59cfe0e357682dfd2ad6d729076 not found: ID does not exist" Oct 11 05:07:02 crc kubenswrapper[4651]: I1011 05:07:02.778294 4651 scope.go:117] "RemoveContainer" containerID="2834835156f8ea76962c6759020fe737f7f6a025b380526fc167680cb68c1799" Oct 11 05:07:02 crc kubenswrapper[4651]: E1011 05:07:02.778496 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2834835156f8ea76962c6759020fe737f7f6a025b380526fc167680cb68c1799\": container with ID starting with 2834835156f8ea76962c6759020fe737f7f6a025b380526fc167680cb68c1799 not found: ID does not exist" containerID="2834835156f8ea76962c6759020fe737f7f6a025b380526fc167680cb68c1799" Oct 11 05:07:02 crc kubenswrapper[4651]: I1011 05:07:02.778521 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2834835156f8ea76962c6759020fe737f7f6a025b380526fc167680cb68c1799"} err="failed to get container status \"2834835156f8ea76962c6759020fe737f7f6a025b380526fc167680cb68c1799\": rpc error: code = NotFound desc = could not find container \"2834835156f8ea76962c6759020fe737f7f6a025b380526fc167680cb68c1799\": container with ID starting with 2834835156f8ea76962c6759020fe737f7f6a025b380526fc167680cb68c1799 not found: ID does not exist" Oct 11 05:07:03 crc kubenswrapper[4651]: I1011 05:07:03.614595 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/aef9d930-4287-490f-85c7-5a791f985a77-etc-swift\") pod \"swift-storage-0\" (UID: \"aef9d930-4287-490f-85c7-5a791f985a77\") " pod="openstack/swift-storage-0" Oct 11 05:07:03 crc kubenswrapper[4651]: I1011 05:07:03.624123 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"etc-swift\" (UniqueName: \"kubernetes.io/projected/aef9d930-4287-490f-85c7-5a791f985a77-etc-swift\") pod \"swift-storage-0\" (UID: \"aef9d930-4287-490f-85c7-5a791f985a77\") " pod="openstack/swift-storage-0" Oct 11 05:07:03 crc kubenswrapper[4651]: I1011 05:07:03.721353 4651 generic.go:334] "Generic (PLEG): container finished" podID="35bb96b9-93d2-4ab5-a102-11f093a29144" containerID="6c3a4e4cba9f1e91fdf2cf55b1c86ae21afe1c7649e4ab24dd7dbbbc8f462c9b" exitCode=0 Oct 11 05:07:03 crc kubenswrapper[4651]: I1011 05:07:03.721411 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-l2gmr" event={"ID":"35bb96b9-93d2-4ab5-a102-11f093a29144","Type":"ContainerDied","Data":"6c3a4e4cba9f1e91fdf2cf55b1c86ae21afe1c7649e4ab24dd7dbbbc8f462c9b"} Oct 11 05:07:03 crc kubenswrapper[4651]: I1011 05:07:03.866271 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Oct 11 05:07:03 crc kubenswrapper[4651]: I1011 05:07:03.877643 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca84ab98-19fd-4cbf-ab28-acb4fced285f" path="/var/lib/kubelet/pods/ca84ab98-19fd-4cbf-ab28-acb4fced285f/volumes" Oct 11 05:07:04 crc kubenswrapper[4651]: I1011 05:07:04.427619 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Oct 11 05:07:04 crc kubenswrapper[4651]: I1011 05:07:04.690047 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-j5wrj"] Oct 11 05:07:04 crc kubenswrapper[4651]: E1011 05:07:04.690461 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa772ad4-2dcf-4af4-9cea-a0c7cb57557d" containerName="mariadb-account-create" Oct 11 05:07:04 crc kubenswrapper[4651]: I1011 05:07:04.690481 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa772ad4-2dcf-4af4-9cea-a0c7cb57557d" containerName="mariadb-account-create" Oct 11 05:07:04 crc kubenswrapper[4651]: E1011 05:07:04.690495 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca84ab98-19fd-4cbf-ab28-acb4fced285f" containerName="init" Oct 11 05:07:04 crc kubenswrapper[4651]: I1011 05:07:04.690504 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca84ab98-19fd-4cbf-ab28-acb4fced285f" containerName="init" Oct 11 05:07:04 crc kubenswrapper[4651]: E1011 05:07:04.690535 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca84ab98-19fd-4cbf-ab28-acb4fced285f" containerName="dnsmasq-dns" Oct 11 05:07:04 crc kubenswrapper[4651]: I1011 05:07:04.690547 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca84ab98-19fd-4cbf-ab28-acb4fced285f" containerName="dnsmasq-dns" Oct 11 05:07:04 crc kubenswrapper[4651]: I1011 05:07:04.690738 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca84ab98-19fd-4cbf-ab28-acb4fced285f" containerName="dnsmasq-dns" Oct 11 05:07:04 crc kubenswrapper[4651]: I1011 05:07:04.690753 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa772ad4-2dcf-4af4-9cea-a0c7cb57557d" containerName="mariadb-account-create" Oct 11 05:07:04 crc kubenswrapper[4651]: I1011 05:07:04.691346 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-j5wrj" Oct 11 05:07:04 crc kubenswrapper[4651]: I1011 05:07:04.694296 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Oct 11 05:07:04 crc kubenswrapper[4651]: I1011 05:07:04.701865 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-9wz7g" Oct 11 05:07:04 crc kubenswrapper[4651]: I1011 05:07:04.701808 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-j5wrj"] Oct 11 05:07:04 crc kubenswrapper[4651]: I1011 05:07:04.733813 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"aef9d930-4287-490f-85c7-5a791f985a77","Type":"ContainerStarted","Data":"158319c59c966487c64244f9362e9dd7e2ba425a5343fd01500acae9ce4e8f46"} Oct 11 05:07:04 crc kubenswrapper[4651]: I1011 05:07:04.832792 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d7e37fff-86fd-435f-b124-27f7c2afb74d-db-sync-config-data\") pod \"glance-db-sync-j5wrj\" (UID: \"d7e37fff-86fd-435f-b124-27f7c2afb74d\") " pod="openstack/glance-db-sync-j5wrj" Oct 11 05:07:04 crc kubenswrapper[4651]: I1011 05:07:04.832843 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7e37fff-86fd-435f-b124-27f7c2afb74d-combined-ca-bundle\") pod \"glance-db-sync-j5wrj\" (UID: \"d7e37fff-86fd-435f-b124-27f7c2afb74d\") " pod="openstack/glance-db-sync-j5wrj" Oct 11 05:07:04 crc kubenswrapper[4651]: I1011 05:07:04.832932 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcxlz\" (UniqueName: \"kubernetes.io/projected/d7e37fff-86fd-435f-b124-27f7c2afb74d-kube-api-access-jcxlz\") pod \"glance-db-sync-j5wrj\" (UID: \"d7e37fff-86fd-435f-b124-27f7c2afb74d\") " pod="openstack/glance-db-sync-j5wrj" Oct 11 05:07:04 crc kubenswrapper[4651]: I1011 05:07:04.832971 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7e37fff-86fd-435f-b124-27f7c2afb74d-config-data\") pod \"glance-db-sync-j5wrj\" (UID: \"d7e37fff-86fd-435f-b124-27f7c2afb74d\") " pod="openstack/glance-db-sync-j5wrj" Oct 11 05:07:04 crc kubenswrapper[4651]: I1011 05:07:04.946056 4651 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-4gdx9" podUID="058302c4-d304-4a99-afbc-84a558968cfe" containerName="ovn-controller" probeResult="failure" output=< Oct 11 05:07:04 crc kubenswrapper[4651]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Oct 11 05:07:04 crc kubenswrapper[4651]: > Oct 11 05:07:04 crc kubenswrapper[4651]: I1011 05:07:04.950660 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcxlz\" (UniqueName: \"kubernetes.io/projected/d7e37fff-86fd-435f-b124-27f7c2afb74d-kube-api-access-jcxlz\") pod \"glance-db-sync-j5wrj\" (UID: \"d7e37fff-86fd-435f-b124-27f7c2afb74d\") " pod="openstack/glance-db-sync-j5wrj" Oct 11 05:07:04 crc kubenswrapper[4651]: I1011 05:07:04.950729 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7e37fff-86fd-435f-b124-27f7c2afb74d-config-data\") pod \"glance-db-sync-j5wrj\" (UID: 
\"d7e37fff-86fd-435f-b124-27f7c2afb74d\") " pod="openstack/glance-db-sync-j5wrj" Oct 11 05:07:04 crc kubenswrapper[4651]: I1011 05:07:04.950781 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d7e37fff-86fd-435f-b124-27f7c2afb74d-db-sync-config-data\") pod \"glance-db-sync-j5wrj\" (UID: \"d7e37fff-86fd-435f-b124-27f7c2afb74d\") " pod="openstack/glance-db-sync-j5wrj" Oct 11 05:07:04 crc kubenswrapper[4651]: I1011 05:07:04.950800 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7e37fff-86fd-435f-b124-27f7c2afb74d-combined-ca-bundle\") pod \"glance-db-sync-j5wrj\" (UID: \"d7e37fff-86fd-435f-b124-27f7c2afb74d\") " pod="openstack/glance-db-sync-j5wrj" Oct 11 05:07:04 crc kubenswrapper[4651]: I1011 05:07:04.956742 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7e37fff-86fd-435f-b124-27f7c2afb74d-combined-ca-bundle\") pod \"glance-db-sync-j5wrj\" (UID: \"d7e37fff-86fd-435f-b124-27f7c2afb74d\") " pod="openstack/glance-db-sync-j5wrj" Oct 11 05:07:04 crc kubenswrapper[4651]: I1011 05:07:04.957585 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d7e37fff-86fd-435f-b124-27f7c2afb74d-db-sync-config-data\") pod \"glance-db-sync-j5wrj\" (UID: \"d7e37fff-86fd-435f-b124-27f7c2afb74d\") " pod="openstack/glance-db-sync-j5wrj" Oct 11 05:07:04 crc kubenswrapper[4651]: I1011 05:07:04.957644 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7e37fff-86fd-435f-b124-27f7c2afb74d-config-data\") pod \"glance-db-sync-j5wrj\" (UID: \"d7e37fff-86fd-435f-b124-27f7c2afb74d\") " pod="openstack/glance-db-sync-j5wrj" Oct 11 05:07:04 crc kubenswrapper[4651]: I1011 05:07:04.964741 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-2rjqb" Oct 11 05:07:04 crc kubenswrapper[4651]: I1011 05:07:04.975319 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcxlz\" (UniqueName: \"kubernetes.io/projected/d7e37fff-86fd-435f-b124-27f7c2afb74d-kube-api-access-jcxlz\") pod \"glance-db-sync-j5wrj\" (UID: \"d7e37fff-86fd-435f-b124-27f7c2afb74d\") " pod="openstack/glance-db-sync-j5wrj" Oct 11 05:07:04 crc kubenswrapper[4651]: I1011 05:07:04.978655 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-2rjqb" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.009928 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-j5wrj" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.121670 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-l2gmr" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.155283 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t9kr6\" (UniqueName: \"kubernetes.io/projected/35bb96b9-93d2-4ab5-a102-11f093a29144-kube-api-access-t9kr6\") pod \"35bb96b9-93d2-4ab5-a102-11f093a29144\" (UID: \"35bb96b9-93d2-4ab5-a102-11f093a29144\") " Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.155589 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35bb96b9-93d2-4ab5-a102-11f093a29144-combined-ca-bundle\") pod \"35bb96b9-93d2-4ab5-a102-11f093a29144\" (UID: \"35bb96b9-93d2-4ab5-a102-11f093a29144\") " Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.155636 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/35bb96b9-93d2-4ab5-a102-11f093a29144-ring-data-devices\") pod \"35bb96b9-93d2-4ab5-a102-11f093a29144\" (UID: \"35bb96b9-93d2-4ab5-a102-11f093a29144\") " Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.155661 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/35bb96b9-93d2-4ab5-a102-11f093a29144-etc-swift\") pod \"35bb96b9-93d2-4ab5-a102-11f093a29144\" (UID: \"35bb96b9-93d2-4ab5-a102-11f093a29144\") " Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.155689 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/35bb96b9-93d2-4ab5-a102-11f093a29144-swiftconf\") pod \"35bb96b9-93d2-4ab5-a102-11f093a29144\" (UID: \"35bb96b9-93d2-4ab5-a102-11f093a29144\") " Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.155866 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/35bb96b9-93d2-4ab5-a102-11f093a29144-scripts\") pod \"35bb96b9-93d2-4ab5-a102-11f093a29144\" (UID: \"35bb96b9-93d2-4ab5-a102-11f093a29144\") " Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.155890 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/35bb96b9-93d2-4ab5-a102-11f093a29144-dispersionconf\") pod \"35bb96b9-93d2-4ab5-a102-11f093a29144\" (UID: \"35bb96b9-93d2-4ab5-a102-11f093a29144\") " Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.157340 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35bb96b9-93d2-4ab5-a102-11f093a29144-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "35bb96b9-93d2-4ab5-a102-11f093a29144" (UID: "35bb96b9-93d2-4ab5-a102-11f093a29144"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.161071 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35bb96b9-93d2-4ab5-a102-11f093a29144-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "35bb96b9-93d2-4ab5-a102-11f093a29144" (UID: "35bb96b9-93d2-4ab5-a102-11f093a29144"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.179231 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35bb96b9-93d2-4ab5-a102-11f093a29144-kube-api-access-t9kr6" (OuterVolumeSpecName: "kube-api-access-t9kr6") pod "35bb96b9-93d2-4ab5-a102-11f093a29144" (UID: "35bb96b9-93d2-4ab5-a102-11f093a29144"). InnerVolumeSpecName "kube-api-access-t9kr6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.201060 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35bb96b9-93d2-4ab5-a102-11f093a29144-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "35bb96b9-93d2-4ab5-a102-11f093a29144" (UID: "35bb96b9-93d2-4ab5-a102-11f093a29144"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.208340 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35bb96b9-93d2-4ab5-a102-11f093a29144-scripts" (OuterVolumeSpecName: "scripts") pod "35bb96b9-93d2-4ab5-a102-11f093a29144" (UID: "35bb96b9-93d2-4ab5-a102-11f093a29144"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.223003 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35bb96b9-93d2-4ab5-a102-11f093a29144-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "35bb96b9-93d2-4ab5-a102-11f093a29144" (UID: "35bb96b9-93d2-4ab5-a102-11f093a29144"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.259097 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35bb96b9-93d2-4ab5-a102-11f093a29144-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.259123 4651 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/35bb96b9-93d2-4ab5-a102-11f093a29144-ring-data-devices\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.259132 4651 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/35bb96b9-93d2-4ab5-a102-11f093a29144-etc-swift\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.259142 4651 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/35bb96b9-93d2-4ab5-a102-11f093a29144-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.259152 4651 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/35bb96b9-93d2-4ab5-a102-11f093a29144-dispersionconf\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.259160 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t9kr6\" (UniqueName: \"kubernetes.io/projected/35bb96b9-93d2-4ab5-a102-11f093a29144-kube-api-access-t9kr6\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.262960 4651 operation_generator.go:803] UnmountVolume.TearDown 
succeeded for volume "kubernetes.io/secret/35bb96b9-93d2-4ab5-a102-11f093a29144-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "35bb96b9-93d2-4ab5-a102-11f093a29144" (UID: "35bb96b9-93d2-4ab5-a102-11f093a29144"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.306534 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-4gdx9-config-5dcn9"] Oct 11 05:07:05 crc kubenswrapper[4651]: E1011 05:07:05.306976 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35bb96b9-93d2-4ab5-a102-11f093a29144" containerName="swift-ring-rebalance" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.306998 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="35bb96b9-93d2-4ab5-a102-11f093a29144" containerName="swift-ring-rebalance" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.307177 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="35bb96b9-93d2-4ab5-a102-11f093a29144" containerName="swift-ring-rebalance" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.307654 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4gdx9-config-5dcn9" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.312845 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.333641 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-4gdx9-config-5dcn9"] Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.360697 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-csd96\" (UniqueName: \"kubernetes.io/projected/6e689799-040b-4ac2-bfda-91726bc08ce8-kube-api-access-csd96\") pod \"ovn-controller-4gdx9-config-5dcn9\" (UID: \"6e689799-040b-4ac2-bfda-91726bc08ce8\") " pod="openstack/ovn-controller-4gdx9-config-5dcn9" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.360780 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/6e689799-040b-4ac2-bfda-91726bc08ce8-var-run-ovn\") pod \"ovn-controller-4gdx9-config-5dcn9\" (UID: \"6e689799-040b-4ac2-bfda-91726bc08ce8\") " pod="openstack/ovn-controller-4gdx9-config-5dcn9" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.360841 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6e689799-040b-4ac2-bfda-91726bc08ce8-var-run\") pod \"ovn-controller-4gdx9-config-5dcn9\" (UID: \"6e689799-040b-4ac2-bfda-91726bc08ce8\") " pod="openstack/ovn-controller-4gdx9-config-5dcn9" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.360884 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/6e689799-040b-4ac2-bfda-91726bc08ce8-var-log-ovn\") pod \"ovn-controller-4gdx9-config-5dcn9\" (UID: \"6e689799-040b-4ac2-bfda-91726bc08ce8\") " pod="openstack/ovn-controller-4gdx9-config-5dcn9" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.360964 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e689799-040b-4ac2-bfda-91726bc08ce8-scripts\") pod 
\"ovn-controller-4gdx9-config-5dcn9\" (UID: \"6e689799-040b-4ac2-bfda-91726bc08ce8\") " pod="openstack/ovn-controller-4gdx9-config-5dcn9" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.360985 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/6e689799-040b-4ac2-bfda-91726bc08ce8-additional-scripts\") pod \"ovn-controller-4gdx9-config-5dcn9\" (UID: \"6e689799-040b-4ac2-bfda-91726bc08ce8\") " pod="openstack/ovn-controller-4gdx9-config-5dcn9" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.361174 4651 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/35bb96b9-93d2-4ab5-a102-11f093a29144-swiftconf\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.462520 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e689799-040b-4ac2-bfda-91726bc08ce8-scripts\") pod \"ovn-controller-4gdx9-config-5dcn9\" (UID: \"6e689799-040b-4ac2-bfda-91726bc08ce8\") " pod="openstack/ovn-controller-4gdx9-config-5dcn9" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.462560 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/6e689799-040b-4ac2-bfda-91726bc08ce8-additional-scripts\") pod \"ovn-controller-4gdx9-config-5dcn9\" (UID: \"6e689799-040b-4ac2-bfda-91726bc08ce8\") " pod="openstack/ovn-controller-4gdx9-config-5dcn9" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.462629 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csd96\" (UniqueName: \"kubernetes.io/projected/6e689799-040b-4ac2-bfda-91726bc08ce8-kube-api-access-csd96\") pod \"ovn-controller-4gdx9-config-5dcn9\" (UID: \"6e689799-040b-4ac2-bfda-91726bc08ce8\") " pod="openstack/ovn-controller-4gdx9-config-5dcn9" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.462660 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/6e689799-040b-4ac2-bfda-91726bc08ce8-var-run-ovn\") pod \"ovn-controller-4gdx9-config-5dcn9\" (UID: \"6e689799-040b-4ac2-bfda-91726bc08ce8\") " pod="openstack/ovn-controller-4gdx9-config-5dcn9" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.462693 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6e689799-040b-4ac2-bfda-91726bc08ce8-var-run\") pod \"ovn-controller-4gdx9-config-5dcn9\" (UID: \"6e689799-040b-4ac2-bfda-91726bc08ce8\") " pod="openstack/ovn-controller-4gdx9-config-5dcn9" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.462726 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/6e689799-040b-4ac2-bfda-91726bc08ce8-var-log-ovn\") pod \"ovn-controller-4gdx9-config-5dcn9\" (UID: \"6e689799-040b-4ac2-bfda-91726bc08ce8\") " pod="openstack/ovn-controller-4gdx9-config-5dcn9" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.465539 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/6e689799-040b-4ac2-bfda-91726bc08ce8-var-log-ovn\") pod \"ovn-controller-4gdx9-config-5dcn9\" (UID: \"6e689799-040b-4ac2-bfda-91726bc08ce8\") " 
pod="openstack/ovn-controller-4gdx9-config-5dcn9" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.465598 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/6e689799-040b-4ac2-bfda-91726bc08ce8-var-run-ovn\") pod \"ovn-controller-4gdx9-config-5dcn9\" (UID: \"6e689799-040b-4ac2-bfda-91726bc08ce8\") " pod="openstack/ovn-controller-4gdx9-config-5dcn9" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.465778 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6e689799-040b-4ac2-bfda-91726bc08ce8-var-run\") pod \"ovn-controller-4gdx9-config-5dcn9\" (UID: \"6e689799-040b-4ac2-bfda-91726bc08ce8\") " pod="openstack/ovn-controller-4gdx9-config-5dcn9" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.466494 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e689799-040b-4ac2-bfda-91726bc08ce8-scripts\") pod \"ovn-controller-4gdx9-config-5dcn9\" (UID: \"6e689799-040b-4ac2-bfda-91726bc08ce8\") " pod="openstack/ovn-controller-4gdx9-config-5dcn9" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.466571 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/6e689799-040b-4ac2-bfda-91726bc08ce8-additional-scripts\") pod \"ovn-controller-4gdx9-config-5dcn9\" (UID: \"6e689799-040b-4ac2-bfda-91726bc08ce8\") " pod="openstack/ovn-controller-4gdx9-config-5dcn9" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.485576 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-csd96\" (UniqueName: \"kubernetes.io/projected/6e689799-040b-4ac2-bfda-91726bc08ce8-kube-api-access-csd96\") pod \"ovn-controller-4gdx9-config-5dcn9\" (UID: \"6e689799-040b-4ac2-bfda-91726bc08ce8\") " pod="openstack/ovn-controller-4gdx9-config-5dcn9" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.630286 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4gdx9-config-5dcn9" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.630349 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-j5wrj"] Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.753543 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-l2gmr" Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.754723 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-l2gmr" event={"ID":"35bb96b9-93d2-4ab5-a102-11f093a29144","Type":"ContainerDied","Data":"07576f478fd72949ec766c230f74ad425195287e15428fff81dc7552ddbfb82d"} Oct 11 05:07:05 crc kubenswrapper[4651]: I1011 05:07:05.754848 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="07576f478fd72949ec766c230f74ad425195287e15428fff81dc7552ddbfb82d" Oct 11 05:07:05 crc kubenswrapper[4651]: W1011 05:07:05.884344 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd7e37fff_86fd_435f_b124_27f7c2afb74d.slice/crio-be55398c99da0ad1dcb630915ff5194cf820f690396a8718af05e6dd1ac74902 WatchSource:0}: Error finding container be55398c99da0ad1dcb630915ff5194cf820f690396a8718af05e6dd1ac74902: Status 404 returned error can't find the container with id be55398c99da0ad1dcb630915ff5194cf820f690396a8718af05e6dd1ac74902 Oct 11 05:07:06 crc kubenswrapper[4651]: I1011 05:07:06.360353 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-4gdx9-config-5dcn9"] Oct 11 05:07:06 crc kubenswrapper[4651]: W1011 05:07:06.374662 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6e689799_040b_4ac2_bfda_91726bc08ce8.slice/crio-e370b5ff406804facca5acdba48614b59d57b9d03166a142989f0b34787596d5 WatchSource:0}: Error finding container e370b5ff406804facca5acdba48614b59d57b9d03166a142989f0b34787596d5: Status 404 returned error can't find the container with id e370b5ff406804facca5acdba48614b59d57b9d03166a142989f0b34787596d5 Oct 11 05:07:06 crc kubenswrapper[4651]: I1011 05:07:06.762519 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"aef9d930-4287-490f-85c7-5a791f985a77","Type":"ContainerStarted","Data":"ebcb5bd9a5b34eb297793ee135487d0d670df4287102513e17ddb65d313851a8"} Oct 11 05:07:06 crc kubenswrapper[4651]: I1011 05:07:06.762914 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"aef9d930-4287-490f-85c7-5a791f985a77","Type":"ContainerStarted","Data":"4b1d23335e67c73df411da41133b573f89a47727536691d607a93d3e506f65ed"} Oct 11 05:07:06 crc kubenswrapper[4651]: I1011 05:07:06.762927 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"aef9d930-4287-490f-85c7-5a791f985a77","Type":"ContainerStarted","Data":"a571c947f9a57494fbfa0464003173bdff8b0543f74668253ea8c27c8742d3d5"} Oct 11 05:07:06 crc kubenswrapper[4651]: I1011 05:07:06.762937 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"aef9d930-4287-490f-85c7-5a791f985a77","Type":"ContainerStarted","Data":"b1a17cd8b8cdd5d2ff214b443e8f1c9395eb99e0405fdaf9934092087492054f"} Oct 11 05:07:06 crc kubenswrapper[4651]: I1011 05:07:06.764379 4651 generic.go:334] "Generic (PLEG): container finished" podID="e73b125b-a52b-44bb-bbed-3a484f53a9cb" containerID="d0718945f619181b6b1f3000cd1eaed2003512a29de9e8f1b43985057d9b10d7" exitCode=0 Oct 11 05:07:06 crc kubenswrapper[4651]: I1011 05:07:06.764432 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" 
event={"ID":"e73b125b-a52b-44bb-bbed-3a484f53a9cb","Type":"ContainerDied","Data":"d0718945f619181b6b1f3000cd1eaed2003512a29de9e8f1b43985057d9b10d7"} Oct 11 05:07:06 crc kubenswrapper[4651]: I1011 05:07:06.771948 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4gdx9-config-5dcn9" event={"ID":"6e689799-040b-4ac2-bfda-91726bc08ce8","Type":"ContainerStarted","Data":"3d1126b6ea5739b8acbd5810cfc36396dde69f981fb91d4145cf4f977590a3f5"} Oct 11 05:07:06 crc kubenswrapper[4651]: I1011 05:07:06.771986 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4gdx9-config-5dcn9" event={"ID":"6e689799-040b-4ac2-bfda-91726bc08ce8","Type":"ContainerStarted","Data":"e370b5ff406804facca5acdba48614b59d57b9d03166a142989f0b34787596d5"} Oct 11 05:07:06 crc kubenswrapper[4651]: I1011 05:07:06.780948 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-j5wrj" event={"ID":"d7e37fff-86fd-435f-b124-27f7c2afb74d","Type":"ContainerStarted","Data":"be55398c99da0ad1dcb630915ff5194cf820f690396a8718af05e6dd1ac74902"} Oct 11 05:07:06 crc kubenswrapper[4651]: I1011 05:07:06.786184 4651 generic.go:334] "Generic (PLEG): container finished" podID="dbfeee44-d2ad-4a4b-814f-916176925aaf" containerID="c50a4b72004db94f4ca1044699f14bb5386093354b3b69c9543ac53d702d890e" exitCode=0 Oct 11 05:07:06 crc kubenswrapper[4651]: I1011 05:07:06.786208 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"dbfeee44-d2ad-4a4b-814f-916176925aaf","Type":"ContainerDied","Data":"c50a4b72004db94f4ca1044699f14bb5386093354b3b69c9543ac53d702d890e"} Oct 11 05:07:06 crc kubenswrapper[4651]: I1011 05:07:06.861447 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-4gdx9-config-5dcn9" podStartSLOduration=1.861428975 podStartE2EDuration="1.861428975s" podCreationTimestamp="2025-10-11 05:07:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:07:06.861176778 +0000 UTC m=+947.757409574" watchObservedRunningTime="2025-10-11 05:07:06.861428975 +0000 UTC m=+947.757661771" Oct 11 05:07:07 crc kubenswrapper[4651]: I1011 05:07:07.797445 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"e73b125b-a52b-44bb-bbed-3a484f53a9cb","Type":"ContainerStarted","Data":"59c32683a820f73d4e339008dae95e311ab157d08bc2237c1620fcf487d08772"} Oct 11 05:07:07 crc kubenswrapper[4651]: I1011 05:07:07.797955 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 11 05:07:07 crc kubenswrapper[4651]: I1011 05:07:07.800371 4651 generic.go:334] "Generic (PLEG): container finished" podID="6e689799-040b-4ac2-bfda-91726bc08ce8" containerID="3d1126b6ea5739b8acbd5810cfc36396dde69f981fb91d4145cf4f977590a3f5" exitCode=0 Oct 11 05:07:07 crc kubenswrapper[4651]: I1011 05:07:07.800416 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4gdx9-config-5dcn9" event={"ID":"6e689799-040b-4ac2-bfda-91726bc08ce8","Type":"ContainerDied","Data":"3d1126b6ea5739b8acbd5810cfc36396dde69f981fb91d4145cf4f977590a3f5"} Oct 11 05:07:07 crc kubenswrapper[4651]: I1011 05:07:07.802663 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" 
event={"ID":"dbfeee44-d2ad-4a4b-814f-916176925aaf","Type":"ContainerStarted","Data":"b59dfb8a86a393f1bb73d7fab6f1f3d33ac444d6555c3ef6692794f083ddbdfc"} Oct 11 05:07:07 crc kubenswrapper[4651]: I1011 05:07:07.803102 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:07:07 crc kubenswrapper[4651]: I1011 05:07:07.827774 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=51.754928127 podStartE2EDuration="58.827752474s" podCreationTimestamp="2025-10-11 05:06:09 +0000 UTC" firstStartedPulling="2025-10-11 05:06:24.736385863 +0000 UTC m=+905.632618659" lastFinishedPulling="2025-10-11 05:06:31.80921021 +0000 UTC m=+912.705443006" observedRunningTime="2025-10-11 05:07:07.816256982 +0000 UTC m=+948.712489788" watchObservedRunningTime="2025-10-11 05:07:07.827752474 +0000 UTC m=+948.723985270" Oct 11 05:07:07 crc kubenswrapper[4651]: I1011 05:07:07.869060 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=51.26102554 podStartE2EDuration="58.869038295s" podCreationTimestamp="2025-10-11 05:06:09 +0000 UTC" firstStartedPulling="2025-10-11 05:06:24.06819717 +0000 UTC m=+904.964429966" lastFinishedPulling="2025-10-11 05:06:31.676209925 +0000 UTC m=+912.572442721" observedRunningTime="2025-10-11 05:07:07.861017801 +0000 UTC m=+948.757250617" watchObservedRunningTime="2025-10-11 05:07:07.869038295 +0000 UTC m=+948.765271101" Oct 11 05:07:08 crc kubenswrapper[4651]: I1011 05:07:08.816998 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"aef9d930-4287-490f-85c7-5a791f985a77","Type":"ContainerStarted","Data":"8b77b0fc8d4539f9d9f54baf5bd8f1dc2b32f7ef9b2da4225ea462e55859564a"} Oct 11 05:07:08 crc kubenswrapper[4651]: I1011 05:07:08.817042 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"aef9d930-4287-490f-85c7-5a791f985a77","Type":"ContainerStarted","Data":"e7c365315d5f4d3c56abd4b81330139711bb2b50ce08f3d5a892c558b5a61246"} Oct 11 05:07:08 crc kubenswrapper[4651]: I1011 05:07:08.817052 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"aef9d930-4287-490f-85c7-5a791f985a77","Type":"ContainerStarted","Data":"87e9a9bde360de0e5cd087f2b324280d2fef0a1e16506667a58fdc8b5d5259d8"} Oct 11 05:07:08 crc kubenswrapper[4651]: I1011 05:07:08.817060 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"aef9d930-4287-490f-85c7-5a791f985a77","Type":"ContainerStarted","Data":"bcc9649ef0abbd0bbff0303d14a4ae9de8dd0144ce1e9261ae4ad59bcfb52634"} Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.120195 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-4gdx9-config-5dcn9" Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.232051 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/6e689799-040b-4ac2-bfda-91726bc08ce8-var-log-ovn\") pod \"6e689799-040b-4ac2-bfda-91726bc08ce8\" (UID: \"6e689799-040b-4ac2-bfda-91726bc08ce8\") " Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.232164 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/6e689799-040b-4ac2-bfda-91726bc08ce8-var-run-ovn\") pod \"6e689799-040b-4ac2-bfda-91726bc08ce8\" (UID: \"6e689799-040b-4ac2-bfda-91726bc08ce8\") " Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.232206 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6e689799-040b-4ac2-bfda-91726bc08ce8-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "6e689799-040b-4ac2-bfda-91726bc08ce8" (UID: "6e689799-040b-4ac2-bfda-91726bc08ce8"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.232284 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-csd96\" (UniqueName: \"kubernetes.io/projected/6e689799-040b-4ac2-bfda-91726bc08ce8-kube-api-access-csd96\") pod \"6e689799-040b-4ac2-bfda-91726bc08ce8\" (UID: \"6e689799-040b-4ac2-bfda-91726bc08ce8\") " Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.232353 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6e689799-040b-4ac2-bfda-91726bc08ce8-var-run\") pod \"6e689799-040b-4ac2-bfda-91726bc08ce8\" (UID: \"6e689799-040b-4ac2-bfda-91726bc08ce8\") " Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.232372 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6e689799-040b-4ac2-bfda-91726bc08ce8-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "6e689799-040b-4ac2-bfda-91726bc08ce8" (UID: "6e689799-040b-4ac2-bfda-91726bc08ce8"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.232393 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/6e689799-040b-4ac2-bfda-91726bc08ce8-additional-scripts\") pod \"6e689799-040b-4ac2-bfda-91726bc08ce8\" (UID: \"6e689799-040b-4ac2-bfda-91726bc08ce8\") " Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.232442 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e689799-040b-4ac2-bfda-91726bc08ce8-scripts\") pod \"6e689799-040b-4ac2-bfda-91726bc08ce8\" (UID: \"6e689799-040b-4ac2-bfda-91726bc08ce8\") " Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.232868 4651 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/6e689799-040b-4ac2-bfda-91726bc08ce8-var-log-ovn\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.232890 4651 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/6e689799-040b-4ac2-bfda-91726bc08ce8-var-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.232935 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6e689799-040b-4ac2-bfda-91726bc08ce8-var-run" (OuterVolumeSpecName: "var-run") pod "6e689799-040b-4ac2-bfda-91726bc08ce8" (UID: "6e689799-040b-4ac2-bfda-91726bc08ce8"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.234755 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e689799-040b-4ac2-bfda-91726bc08ce8-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "6e689799-040b-4ac2-bfda-91726bc08ce8" (UID: "6e689799-040b-4ac2-bfda-91726bc08ce8"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.234875 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e689799-040b-4ac2-bfda-91726bc08ce8-scripts" (OuterVolumeSpecName: "scripts") pod "6e689799-040b-4ac2-bfda-91726bc08ce8" (UID: "6e689799-040b-4ac2-bfda-91726bc08ce8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.241140 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e689799-040b-4ac2-bfda-91726bc08ce8-kube-api-access-csd96" (OuterVolumeSpecName: "kube-api-access-csd96") pod "6e689799-040b-4ac2-bfda-91726bc08ce8" (UID: "6e689799-040b-4ac2-bfda-91726bc08ce8"). InnerVolumeSpecName "kube-api-access-csd96". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.334833 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-csd96\" (UniqueName: \"kubernetes.io/projected/6e689799-040b-4ac2-bfda-91726bc08ce8-kube-api-access-csd96\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.334865 4651 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6e689799-040b-4ac2-bfda-91726bc08ce8-var-run\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.334876 4651 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/6e689799-040b-4ac2-bfda-91726bc08ce8-additional-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.334887 4651 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e689799-040b-4ac2-bfda-91726bc08ce8-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.448694 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-4gdx9-config-5dcn9"] Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.457310 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-4gdx9-config-5dcn9"] Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.552926 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-4gdx9-config-kjxwc"] Oct 11 05:07:09 crc kubenswrapper[4651]: E1011 05:07:09.553271 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e689799-040b-4ac2-bfda-91726bc08ce8" containerName="ovn-config" Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.553289 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e689799-040b-4ac2-bfda-91726bc08ce8" containerName="ovn-config" Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.553452 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e689799-040b-4ac2-bfda-91726bc08ce8" containerName="ovn-config" Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.553974 4651 util.go:30] "No sandbox for pod can be found. 
Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.553974 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4gdx9-config-kjxwc"
Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.566998 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-4gdx9-config-kjxwc"]
Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.639061 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2f298166-9707-4e69-89fd-ac9bfc23080b-var-run-ovn\") pod \"ovn-controller-4gdx9-config-kjxwc\" (UID: \"2f298166-9707-4e69-89fd-ac9bfc23080b\") " pod="openstack/ovn-controller-4gdx9-config-kjxwc"
Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.639117 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2f298166-9707-4e69-89fd-ac9bfc23080b-var-run\") pod \"ovn-controller-4gdx9-config-kjxwc\" (UID: \"2f298166-9707-4e69-89fd-ac9bfc23080b\") " pod="openstack/ovn-controller-4gdx9-config-kjxwc"
Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.639174 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/2f298166-9707-4e69-89fd-ac9bfc23080b-additional-scripts\") pod \"ovn-controller-4gdx9-config-kjxwc\" (UID: \"2f298166-9707-4e69-89fd-ac9bfc23080b\") " pod="openstack/ovn-controller-4gdx9-config-kjxwc"
Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.639407 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2f298166-9707-4e69-89fd-ac9bfc23080b-var-log-ovn\") pod \"ovn-controller-4gdx9-config-kjxwc\" (UID: \"2f298166-9707-4e69-89fd-ac9bfc23080b\") " pod="openstack/ovn-controller-4gdx9-config-kjxwc"
Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.639488 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2f298166-9707-4e69-89fd-ac9bfc23080b-scripts\") pod \"ovn-controller-4gdx9-config-kjxwc\" (UID: \"2f298166-9707-4e69-89fd-ac9bfc23080b\") " pod="openstack/ovn-controller-4gdx9-config-kjxwc"
Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.639524 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mz86z\" (UniqueName: \"kubernetes.io/projected/2f298166-9707-4e69-89fd-ac9bfc23080b-kube-api-access-mz86z\") pod \"ovn-controller-4gdx9-config-kjxwc\" (UID: \"2f298166-9707-4e69-89fd-ac9bfc23080b\") " pod="openstack/ovn-controller-4gdx9-config-kjxwc"
Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.740899 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/2f298166-9707-4e69-89fd-ac9bfc23080b-additional-scripts\") pod \"ovn-controller-4gdx9-config-kjxwc\" (UID: \"2f298166-9707-4e69-89fd-ac9bfc23080b\") " pod="openstack/ovn-controller-4gdx9-config-kjxwc"
Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.740942 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2f298166-9707-4e69-89fd-ac9bfc23080b-var-log-ovn\") pod \"ovn-controller-4gdx9-config-kjxwc\" (UID: \"2f298166-9707-4e69-89fd-ac9bfc23080b\") " pod="openstack/ovn-controller-4gdx9-config-kjxwc"
Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.741003 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2f298166-9707-4e69-89fd-ac9bfc23080b-scripts\") pod \"ovn-controller-4gdx9-config-kjxwc\" (UID: \"2f298166-9707-4e69-89fd-ac9bfc23080b\") " pod="openstack/ovn-controller-4gdx9-config-kjxwc"
Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.741022 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mz86z\" (UniqueName: \"kubernetes.io/projected/2f298166-9707-4e69-89fd-ac9bfc23080b-kube-api-access-mz86z\") pod \"ovn-controller-4gdx9-config-kjxwc\" (UID: \"2f298166-9707-4e69-89fd-ac9bfc23080b\") " pod="openstack/ovn-controller-4gdx9-config-kjxwc"
Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.741070 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2f298166-9707-4e69-89fd-ac9bfc23080b-var-run-ovn\") pod \"ovn-controller-4gdx9-config-kjxwc\" (UID: \"2f298166-9707-4e69-89fd-ac9bfc23080b\") " pod="openstack/ovn-controller-4gdx9-config-kjxwc"
Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.741092 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2f298166-9707-4e69-89fd-ac9bfc23080b-var-run\") pod \"ovn-controller-4gdx9-config-kjxwc\" (UID: \"2f298166-9707-4e69-89fd-ac9bfc23080b\") " pod="openstack/ovn-controller-4gdx9-config-kjxwc"
Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.741400 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2f298166-9707-4e69-89fd-ac9bfc23080b-var-run\") pod \"ovn-controller-4gdx9-config-kjxwc\" (UID: \"2f298166-9707-4e69-89fd-ac9bfc23080b\") " pod="openstack/ovn-controller-4gdx9-config-kjxwc"
Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.741676 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2f298166-9707-4e69-89fd-ac9bfc23080b-var-log-ovn\") pod \"ovn-controller-4gdx9-config-kjxwc\" (UID: \"2f298166-9707-4e69-89fd-ac9bfc23080b\") " pod="openstack/ovn-controller-4gdx9-config-kjxwc"
Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.741742 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2f298166-9707-4e69-89fd-ac9bfc23080b-var-run-ovn\") pod \"ovn-controller-4gdx9-config-kjxwc\" (UID: \"2f298166-9707-4e69-89fd-ac9bfc23080b\") " pod="openstack/ovn-controller-4gdx9-config-kjxwc"
Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.742424 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/2f298166-9707-4e69-89fd-ac9bfc23080b-additional-scripts\") pod \"ovn-controller-4gdx9-config-kjxwc\" (UID: \"2f298166-9707-4e69-89fd-ac9bfc23080b\") " pod="openstack/ovn-controller-4gdx9-config-kjxwc"
Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.743406 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2f298166-9707-4e69-89fd-ac9bfc23080b-scripts\") pod \"ovn-controller-4gdx9-config-kjxwc\" (UID: \"2f298166-9707-4e69-89fd-ac9bfc23080b\") " pod="openstack/ovn-controller-4gdx9-config-kjxwc"
Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.760157 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mz86z\" (UniqueName: \"kubernetes.io/projected/2f298166-9707-4e69-89fd-ac9bfc23080b-kube-api-access-mz86z\") pod \"ovn-controller-4gdx9-config-kjxwc\" (UID: \"2f298166-9707-4e69-89fd-ac9bfc23080b\") " pod="openstack/ovn-controller-4gdx9-config-kjxwc"
Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.824767 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e370b5ff406804facca5acdba48614b59d57b9d03166a142989f0b34787596d5"
Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.824864 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4gdx9-config-5dcn9"
Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.878144 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4gdx9-config-kjxwc"
Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.894114 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e689799-040b-4ac2-bfda-91726bc08ce8" path="/var/lib/kubelet/pods/6e689799-040b-4ac2-bfda-91726bc08ce8/volumes"
Oct 11 05:07:09 crc kubenswrapper[4651]: I1011 05:07:09.930554 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-4gdx9"
Oct 11 05:07:10 crc kubenswrapper[4651]: I1011 05:07:10.584438 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-4gdx9-config-kjxwc"]
Oct 11 05:07:10 crc kubenswrapper[4651]: I1011 05:07:10.838711 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"aef9d930-4287-490f-85c7-5a791f985a77","Type":"ContainerStarted","Data":"23e11601be7cb137e24c884d744a9aab6fdfe66948ca5d5fcdbe5fd69183cb0c"}
Oct 11 05:07:10 crc kubenswrapper[4651]: I1011 05:07:10.838754 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"aef9d930-4287-490f-85c7-5a791f985a77","Type":"ContainerStarted","Data":"346f1167b2420fb1aef72a27f3bd757f115673026d3f106f4dc7a6e7aac2c8d2"}
Oct 11 05:07:10 crc kubenswrapper[4651]: I1011 05:07:10.841202 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4gdx9-config-kjxwc" event={"ID":"2f298166-9707-4e69-89fd-ac9bfc23080b","Type":"ContainerStarted","Data":"aa9957f3d2a5128701acc17bab2e4707bc0087900193f240472d2dcd8c2a7a94"}
Oct 11 05:07:11 crc kubenswrapper[4651]: I1011 05:07:11.859273 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"aef9d930-4287-490f-85c7-5a791f985a77","Type":"ContainerStarted","Data":"39efccf6b0296ff16a1aac1b4e7de432867ad83b31a3dc847bcd6768260a9233"}
Oct 11 05:07:11 crc kubenswrapper[4651]: I1011 05:07:11.859601 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"aef9d930-4287-490f-85c7-5a791f985a77","Type":"ContainerStarted","Data":"52d5e470ca0270d8eb841b23c0c98491e1d4ccc5ba69af3520af32f65ce140c5"}
Oct 11 05:07:11 crc kubenswrapper[4651]: I1011 05:07:11.859708 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"aef9d930-4287-490f-85c7-5a791f985a77","Type":"ContainerStarted","Data":"6037199b3618d3d87f95c7250bb9e47a8795bf342869447bb26e4636599c91d3"}
Oct 11 05:07:11 crc kubenswrapper[4651]: I1011 05:07:11.859718 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"aef9d930-4287-490f-85c7-5a791f985a77","Type":"ContainerStarted","Data":"44e596f50a3a8e40e4baecae47acac3633a36a7851310e648fd863cfe1562803"}
Oct 11 05:07:11 crc kubenswrapper[4651]: I1011 05:07:11.859727 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"aef9d930-4287-490f-85c7-5a791f985a77","Type":"ContainerStarted","Data":"106bb59627cc5f072a061f2fd3f473c157e2940b2f489c7f1fd14c54b7db1e96"}
Oct 11 05:07:11 crc kubenswrapper[4651]: I1011 05:07:11.862674 4651 generic.go:334] "Generic (PLEG): container finished" podID="2f298166-9707-4e69-89fd-ac9bfc23080b" containerID="dba91db59b330a75a88f4bf1b3e3b3b23f41ca7ce0d75ff9776900bf33cdd2bc" exitCode=0
Oct 11 05:07:11 crc kubenswrapper[4651]: I1011 05:07:11.862715 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4gdx9-config-kjxwc" event={"ID":"2f298166-9707-4e69-89fd-ac9bfc23080b","Type":"ContainerDied","Data":"dba91db59b330a75a88f4bf1b3e3b3b23f41ca7ce0d75ff9776900bf33cdd2bc"}
Oct 11 05:07:11 crc kubenswrapper[4651]: I1011 05:07:11.920841 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=20.271142686 podStartE2EDuration="25.920801757s" podCreationTimestamp="2025-10-11 05:06:46 +0000 UTC" firstStartedPulling="2025-10-11 05:07:04.443100008 +0000 UTC m=+945.339332804" lastFinishedPulling="2025-10-11 05:07:10.092759079 +0000 UTC m=+950.988991875" observedRunningTime="2025-10-11 05:07:11.914338492 +0000 UTC m=+952.810571348" watchObservedRunningTime="2025-10-11 05:07:11.920801757 +0000 UTC m=+952.817034553"
Oct 11 05:07:12 crc kubenswrapper[4651]: I1011 05:07:12.167980 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-s97wk"]
Oct 11 05:07:12 crc kubenswrapper[4651]: I1011 05:07:12.169313 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk"
Oct 11 05:07:12 crc kubenswrapper[4651]: I1011 05:07:12.171219 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0"
Oct 11 05:07:12 crc kubenswrapper[4651]: I1011 05:07:12.181466 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-s97wk"]
Oct 11 05:07:12 crc kubenswrapper[4651]: I1011 05:07:12.275236 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4fb44cbf-367e-4d0e-95f6-7411e5b76817-dns-svc\") pod \"dnsmasq-dns-6d5b6d6b67-s97wk\" (UID: \"4fb44cbf-367e-4d0e-95f6-7411e5b76817\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk"
Oct 11 05:07:12 crc kubenswrapper[4651]: I1011 05:07:12.275297 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4fb44cbf-367e-4d0e-95f6-7411e5b76817-config\") pod \"dnsmasq-dns-6d5b6d6b67-s97wk\" (UID: \"4fb44cbf-367e-4d0e-95f6-7411e5b76817\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk"
Oct 11 05:07:12 crc kubenswrapper[4651]: I1011 05:07:12.275323 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4fb44cbf-367e-4d0e-95f6-7411e5b76817-dns-swift-storage-0\") pod \"dnsmasq-dns-6d5b6d6b67-s97wk\" (UID: \"4fb44cbf-367e-4d0e-95f6-7411e5b76817\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk"
Oct 11 05:07:12 crc kubenswrapper[4651]: I1011 05:07:12.275342 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4fb44cbf-367e-4d0e-95f6-7411e5b76817-ovsdbserver-sb\") pod \"dnsmasq-dns-6d5b6d6b67-s97wk\" (UID: \"4fb44cbf-367e-4d0e-95f6-7411e5b76817\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk"
Oct 11 05:07:12 crc kubenswrapper[4651]: I1011 05:07:12.275370 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4fb44cbf-367e-4d0e-95f6-7411e5b76817-ovsdbserver-nb\") pod \"dnsmasq-dns-6d5b6d6b67-s97wk\" (UID: \"4fb44cbf-367e-4d0e-95f6-7411e5b76817\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk"
Oct 11 05:07:12 crc kubenswrapper[4651]: I1011 05:07:12.275507 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rmwnd\" (UniqueName: \"kubernetes.io/projected/4fb44cbf-367e-4d0e-95f6-7411e5b76817-kube-api-access-rmwnd\") pod \"dnsmasq-dns-6d5b6d6b67-s97wk\" (UID: \"4fb44cbf-367e-4d0e-95f6-7411e5b76817\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk"
Oct 11 05:07:12 crc kubenswrapper[4651]: I1011 05:07:12.376877 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rmwnd\" (UniqueName: \"kubernetes.io/projected/4fb44cbf-367e-4d0e-95f6-7411e5b76817-kube-api-access-rmwnd\") pod \"dnsmasq-dns-6d5b6d6b67-s97wk\" (UID: \"4fb44cbf-367e-4d0e-95f6-7411e5b76817\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk"
Oct 11 05:07:12 crc kubenswrapper[4651]: I1011 05:07:12.376953 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4fb44cbf-367e-4d0e-95f6-7411e5b76817-dns-svc\") pod \"dnsmasq-dns-6d5b6d6b67-s97wk\" (UID: \"4fb44cbf-367e-4d0e-95f6-7411e5b76817\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk"
Oct 11 05:07:12 crc kubenswrapper[4651]: I1011 05:07:12.376981 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4fb44cbf-367e-4d0e-95f6-7411e5b76817-config\") pod \"dnsmasq-dns-6d5b6d6b67-s97wk\" (UID: \"4fb44cbf-367e-4d0e-95f6-7411e5b76817\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk"
Oct 11 05:07:12 crc kubenswrapper[4651]: I1011 05:07:12.377002 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4fb44cbf-367e-4d0e-95f6-7411e5b76817-dns-swift-storage-0\") pod \"dnsmasq-dns-6d5b6d6b67-s97wk\" (UID: \"4fb44cbf-367e-4d0e-95f6-7411e5b76817\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk"
Oct 11 05:07:12 crc kubenswrapper[4651]: I1011 05:07:12.377018 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4fb44cbf-367e-4d0e-95f6-7411e5b76817-ovsdbserver-sb\") pod \"dnsmasq-dns-6d5b6d6b67-s97wk\" (UID: \"4fb44cbf-367e-4d0e-95f6-7411e5b76817\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk"
Oct 11 05:07:12 crc kubenswrapper[4651]: I1011 05:07:12.377042 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4fb44cbf-367e-4d0e-95f6-7411e5b76817-ovsdbserver-nb\") pod \"dnsmasq-dns-6d5b6d6b67-s97wk\" (UID: \"4fb44cbf-367e-4d0e-95f6-7411e5b76817\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk"
Oct 11 05:07:12 crc kubenswrapper[4651]: I1011 05:07:12.378135 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4fb44cbf-367e-4d0e-95f6-7411e5b76817-ovsdbserver-nb\") pod \"dnsmasq-dns-6d5b6d6b67-s97wk\" (UID: \"4fb44cbf-367e-4d0e-95f6-7411e5b76817\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk"
Oct 11 05:07:12 crc kubenswrapper[4651]: I1011 05:07:12.378161 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4fb44cbf-367e-4d0e-95f6-7411e5b76817-config\") pod \"dnsmasq-dns-6d5b6d6b67-s97wk\" (UID: \"4fb44cbf-367e-4d0e-95f6-7411e5b76817\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk"
Oct 11 05:07:12 crc kubenswrapper[4651]: I1011 05:07:12.378166 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4fb44cbf-367e-4d0e-95f6-7411e5b76817-dns-swift-storage-0\") pod \"dnsmasq-dns-6d5b6d6b67-s97wk\" (UID: \"4fb44cbf-367e-4d0e-95f6-7411e5b76817\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk"
Oct 11 05:07:12 crc kubenswrapper[4651]: I1011 05:07:12.378214 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4fb44cbf-367e-4d0e-95f6-7411e5b76817-dns-svc\") pod \"dnsmasq-dns-6d5b6d6b67-s97wk\" (UID: \"4fb44cbf-367e-4d0e-95f6-7411e5b76817\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk"
Oct 11 05:07:12 crc kubenswrapper[4651]: I1011 05:07:12.378679 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4fb44cbf-367e-4d0e-95f6-7411e5b76817-ovsdbserver-sb\") pod \"dnsmasq-dns-6d5b6d6b67-s97wk\" (UID: \"4fb44cbf-367e-4d0e-95f6-7411e5b76817\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk"
Oct 11 05:07:12 crc kubenswrapper[4651]: I1011 05:07:12.395724 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rmwnd\" (UniqueName: \"kubernetes.io/projected/4fb44cbf-367e-4d0e-95f6-7411e5b76817-kube-api-access-rmwnd\") pod \"dnsmasq-dns-6d5b6d6b67-s97wk\" (UID: \"4fb44cbf-367e-4d0e-95f6-7411e5b76817\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk"
Oct 11 05:07:12 crc kubenswrapper[4651]: I1011 05:07:12.485543 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk"
Oct 11 05:07:16 crc kubenswrapper[4651]: I1011 05:07:16.310260 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 11 05:07:16 crc kubenswrapper[4651]: I1011 05:07:16.310639 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 11 05:07:16 crc kubenswrapper[4651]: I1011 05:07:16.310695 4651 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-78jnv"
Oct 11 05:07:16 crc kubenswrapper[4651]: I1011 05:07:16.311399 4651 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bbcfc5308211a05ce73ce546a00d78ee49c4d35fa44427537e93a8a405fe9270"} pod="openshift-machine-config-operator/machine-config-daemon-78jnv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 11 05:07:16 crc kubenswrapper[4651]: I1011 05:07:16.311454 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" containerID="cri-o://bbcfc5308211a05ce73ce546a00d78ee49c4d35fa44427537e93a8a405fe9270" gracePeriod=600
Oct 11 05:07:16 crc kubenswrapper[4651]: I1011 05:07:16.910091 4651 generic.go:334] "Generic (PLEG): container finished" podID="519a1ae1-e964-48b0-8b61-835146df28c1" containerID="bbcfc5308211a05ce73ce546a00d78ee49c4d35fa44427537e93a8a405fe9270" exitCode=0
Oct 11 05:07:16 crc kubenswrapper[4651]: I1011 05:07:16.910157 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" event={"ID":"519a1ae1-e964-48b0-8b61-835146df28c1","Type":"ContainerDied","Data":"bbcfc5308211a05ce73ce546a00d78ee49c4d35fa44427537e93a8a405fe9270"}
Oct 11 05:07:16 crc kubenswrapper[4651]: I1011 05:07:16.910231 4651 scope.go:117] "RemoveContainer" containerID="f3d31fd3172b3e1939d18cb8fc4eb85b3b6d1b1c4f71fa7a9aed3462d80c8443"
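The machine-config-daemon sequence above is a complete liveness-probe restart: the prober's HTTP GET to http://127.0.0.1:8798/health is refused, prober.go:107 records the failure, kuberuntime_container.go:808 kills the container with gracePeriod=600, the container exits (exitCode=0), and scope.go:117 discards the previous dead instance before a replacement starts at 05:07:18. A minimal Go sketch of that style of HTTP check; the URL is taken from the log, while the 1-second timeout is an assumption (not the kubelet's configured value), and the success range reflects the general Kubernetes rule that HTTP probe status codes from 200 up to but not including 400 count as healthy:

// healthcheck.go - the shape of the HTTP liveness check failing above.
package main

import (
	"fmt"
	"net/http"
	"time"
)

func probe(url string) error {
	client := &http.Client{Timeout: 1 * time.Second} // assumed timeout
	resp, err := client.Get(url)
	if err != nil {
		return err // "connect: connection refused" surfaces here
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("unhealthy status: %s", resp.Status)
	}
	return nil
}

func main() {
	if err := probe("http://127.0.0.1:8798/health"); err != nil {
		fmt.Println("Probe failed:", err)
	} else {
		fmt.Println("ok")
	}
}

Note the exit code of 0 above: the daemon shut down cleanly within the grace period rather than being SIGKILLed, which is why the restart is orderly.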
Oct 11 05:07:18 crc kubenswrapper[4651]: I1011 05:07:18.337576 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4gdx9-config-kjxwc"
Oct 11 05:07:18 crc kubenswrapper[4651]: I1011 05:07:18.490877 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2f298166-9707-4e69-89fd-ac9bfc23080b-var-run\") pod \"2f298166-9707-4e69-89fd-ac9bfc23080b\" (UID: \"2f298166-9707-4e69-89fd-ac9bfc23080b\") "
Oct 11 05:07:18 crc kubenswrapper[4651]: I1011 05:07:18.490946 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2f298166-9707-4e69-89fd-ac9bfc23080b-var-log-ovn\") pod \"2f298166-9707-4e69-89fd-ac9bfc23080b\" (UID: \"2f298166-9707-4e69-89fd-ac9bfc23080b\") "
Oct 11 05:07:18 crc kubenswrapper[4651]: I1011 05:07:18.491002 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/2f298166-9707-4e69-89fd-ac9bfc23080b-additional-scripts\") pod \"2f298166-9707-4e69-89fd-ac9bfc23080b\" (UID: \"2f298166-9707-4e69-89fd-ac9bfc23080b\") "
Oct 11 05:07:18 crc kubenswrapper[4651]: I1011 05:07:18.491047 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mz86z\" (UniqueName: \"kubernetes.io/projected/2f298166-9707-4e69-89fd-ac9bfc23080b-kube-api-access-mz86z\") pod \"2f298166-9707-4e69-89fd-ac9bfc23080b\" (UID: \"2f298166-9707-4e69-89fd-ac9bfc23080b\") "
Oct 11 05:07:18 crc kubenswrapper[4651]: I1011 05:07:18.491114 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2f298166-9707-4e69-89fd-ac9bfc23080b-scripts\") pod \"2f298166-9707-4e69-89fd-ac9bfc23080b\" (UID: \"2f298166-9707-4e69-89fd-ac9bfc23080b\") "
Oct 11 05:07:18 crc kubenswrapper[4651]: I1011 05:07:18.491164 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2f298166-9707-4e69-89fd-ac9bfc23080b-var-run-ovn\") pod \"2f298166-9707-4e69-89fd-ac9bfc23080b\" (UID: \"2f298166-9707-4e69-89fd-ac9bfc23080b\") "
Oct 11 05:07:18 crc kubenswrapper[4651]: I1011 05:07:18.491460 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f298166-9707-4e69-89fd-ac9bfc23080b-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "2f298166-9707-4e69-89fd-ac9bfc23080b" (UID: "2f298166-9707-4e69-89fd-ac9bfc23080b"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 11 05:07:18 crc kubenswrapper[4651]: I1011 05:07:18.491493 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f298166-9707-4e69-89fd-ac9bfc23080b-var-run" (OuterVolumeSpecName: "var-run") pod "2f298166-9707-4e69-89fd-ac9bfc23080b" (UID: "2f298166-9707-4e69-89fd-ac9bfc23080b"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 11 05:07:18 crc kubenswrapper[4651]: I1011 05:07:18.491507 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2f298166-9707-4e69-89fd-ac9bfc23080b-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "2f298166-9707-4e69-89fd-ac9bfc23080b" (UID: "2f298166-9707-4e69-89fd-ac9bfc23080b"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 11 05:07:18 crc kubenswrapper[4651]: I1011 05:07:18.492127 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f298166-9707-4e69-89fd-ac9bfc23080b-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "2f298166-9707-4e69-89fd-ac9bfc23080b" (UID: "2f298166-9707-4e69-89fd-ac9bfc23080b"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:07:18 crc kubenswrapper[4651]: I1011 05:07:18.492779 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f298166-9707-4e69-89fd-ac9bfc23080b-scripts" (OuterVolumeSpecName: "scripts") pod "2f298166-9707-4e69-89fd-ac9bfc23080b" (UID: "2f298166-9707-4e69-89fd-ac9bfc23080b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:07:18 crc kubenswrapper[4651]: I1011 05:07:18.497446 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f298166-9707-4e69-89fd-ac9bfc23080b-kube-api-access-mz86z" (OuterVolumeSpecName: "kube-api-access-mz86z") pod "2f298166-9707-4e69-89fd-ac9bfc23080b" (UID: "2f298166-9707-4e69-89fd-ac9bfc23080b"). InnerVolumeSpecName "kube-api-access-mz86z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:07:18 crc kubenswrapper[4651]: I1011 05:07:18.592996 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mz86z\" (UniqueName: \"kubernetes.io/projected/2f298166-9707-4e69-89fd-ac9bfc23080b-kube-api-access-mz86z\") on node \"crc\" DevicePath \"\""
Oct 11 05:07:18 crc kubenswrapper[4651]: I1011 05:07:18.593298 4651 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2f298166-9707-4e69-89fd-ac9bfc23080b-scripts\") on node \"crc\" DevicePath \"\""
Oct 11 05:07:18 crc kubenswrapper[4651]: I1011 05:07:18.593308 4651 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/2f298166-9707-4e69-89fd-ac9bfc23080b-var-run-ovn\") on node \"crc\" DevicePath \"\""
Oct 11 05:07:18 crc kubenswrapper[4651]: I1011 05:07:18.593318 4651 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2f298166-9707-4e69-89fd-ac9bfc23080b-var-run\") on node \"crc\" DevicePath \"\""
Oct 11 05:07:18 crc kubenswrapper[4651]: I1011 05:07:18.593326 4651 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/2f298166-9707-4e69-89fd-ac9bfc23080b-var-log-ovn\") on node \"crc\" DevicePath \"\""
Oct 11 05:07:18 crc kubenswrapper[4651]: I1011 05:07:18.593335 4651 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/2f298166-9707-4e69-89fd-ac9bfc23080b-additional-scripts\") on node \"crc\" DevicePath \"\""
Oct 11 05:07:18 crc kubenswrapper[4651]: I1011 05:07:18.673682 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-s97wk"]
Oct 11 05:07:18 crc kubenswrapper[4651]: W1011 05:07:18.678186 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4fb44cbf_367e_4d0e_95f6_7411e5b76817.slice/crio-a38242094dae6c2b83d4a81f9f113ade0605c607ba64045d8aa3e7b6bf37964f WatchSource:0}: Error finding container a38242094dae6c2b83d4a81f9f113ade0605c607ba64045d8aa3e7b6bf37964f: Status 404 returned error can't find the container with id a38242094dae6c2b83d4a81f9f113ade0605c607ba64045d8aa3e7b6bf37964f
Oct 11 05:07:18 crc kubenswrapper[4651]: I1011 05:07:18.926149 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4gdx9-config-kjxwc"
Oct 11 05:07:18 crc kubenswrapper[4651]: I1011 05:07:18.926153 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4gdx9-config-kjxwc" event={"ID":"2f298166-9707-4e69-89fd-ac9bfc23080b","Type":"ContainerDied","Data":"aa9957f3d2a5128701acc17bab2e4707bc0087900193f240472d2dcd8c2a7a94"}
Oct 11 05:07:18 crc kubenswrapper[4651]: I1011 05:07:18.926889 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aa9957f3d2a5128701acc17bab2e4707bc0087900193f240472d2dcd8c2a7a94"
Oct 11 05:07:18 crc kubenswrapper[4651]: I1011 05:07:18.929307 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" event={"ID":"519a1ae1-e964-48b0-8b61-835146df28c1","Type":"ContainerStarted","Data":"58a7a61e6423d5c4aad48bf422e788efe6a0897625015570766366bb08a19f53"}
Oct 11 05:07:18 crc kubenswrapper[4651]: I1011 05:07:18.931289 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-j5wrj" event={"ID":"d7e37fff-86fd-435f-b124-27f7c2afb74d","Type":"ContainerStarted","Data":"5063e393f084e925c50d94f5fe68ea874eea10559384d86483c21265911bee04"}
Oct 11 05:07:18 crc kubenswrapper[4651]: I1011 05:07:18.933972 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk" event={"ID":"4fb44cbf-367e-4d0e-95f6-7411e5b76817","Type":"ContainerStarted","Data":"4365429e5a86c310c6592e2cebbac5f174487c546f08fb667e2532d2c07b6357"}
Oct 11 05:07:18 crc kubenswrapper[4651]: I1011 05:07:18.934039 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk" event={"ID":"4fb44cbf-367e-4d0e-95f6-7411e5b76817","Type":"ContainerStarted","Data":"a38242094dae6c2b83d4a81f9f113ade0605c607ba64045d8aa3e7b6bf37964f"}
Oct 11 05:07:18 crc kubenswrapper[4651]: I1011 05:07:18.989969 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-j5wrj" podStartSLOduration=2.630728345 podStartE2EDuration="14.98995103s" podCreationTimestamp="2025-10-11 05:07:04 +0000 UTC" firstStartedPulling="2025-10-11 05:07:05.919532547 +0000 UTC m=+946.815765343" lastFinishedPulling="2025-10-11 05:07:18.278755232 +0000 UTC m=+959.174988028" observedRunningTime="2025-10-11 05:07:18.985420074 +0000 UTC m=+959.881652880" watchObservedRunningTime="2025-10-11 05:07:18.98995103 +0000 UTC m=+959.886183826"
Oct 11 05:07:19 crc kubenswrapper[4651]: I1011 05:07:19.436394 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-4gdx9-config-kjxwc"]
Oct 11 05:07:19 crc kubenswrapper[4651]: I1011 05:07:19.450580 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-4gdx9-config-kjxwc"]
Oct 11 05:07:19 crc kubenswrapper[4651]: I1011 05:07:19.887578 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f298166-9707-4e69-89fd-ac9bfc23080b" path="/var/lib/kubelet/pods/2f298166-9707-4e69-89fd-ac9bfc23080b/volumes"
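The pod openstack/ovn-controller-4gdx9-config-kjxwc has now gone through its entire life in about ten seconds: SyncLoop ADD at 05:07:09, sandbox and mounts, one ovn-config container run to completion, SyncLoop DELETE and REMOVE, and the orphaned-volumes cleanup above. A small Go sketch, written for this note, that pulls that ADD/UPDATE/DELETE/REMOVE skeleton for one pod out of a log on stdin (the pod name is the example from above; the regex is written against the klog format in these entries):

// podtrace.go - print SyncLoop lifecycle events for one pod.
package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

func main() {
	const pod = "openstack/ovn-controller-4gdx9-config-kjxwc" // example pod
	// Capture the klog timestamp and the SyncLoop verb on matching lines.
	re := regexp.MustCompile(`(I\d{4} \d\d:\d\d:\d\d\.\d+).*"SyncLoop (ADD|UPDATE|DELETE|REMOVE)".*` + regexp.QuoteMeta(pod))
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024)
	for sc.Scan() {
		if m := re.FindStringSubmatch(sc.Text()); m != nil {
			fmt.Println(m[1], m[2])
		}
	}
}

Run as "go run podtrace.go < kubelet.log"; the gap between DELETE and the "Cleaned up orphaned pod volumes dir" line is a quick proxy for how long teardown took.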
podID="4fb44cbf-367e-4d0e-95f6-7411e5b76817" containerID="4365429e5a86c310c6592e2cebbac5f174487c546f08fb667e2532d2c07b6357" exitCode=0 Oct 11 05:07:19 crc kubenswrapper[4651]: I1011 05:07:19.948394 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk" event={"ID":"4fb44cbf-367e-4d0e-95f6-7411e5b76817","Type":"ContainerDied","Data":"4365429e5a86c310c6592e2cebbac5f174487c546f08fb667e2532d2c07b6357"} Oct 11 05:07:20 crc kubenswrapper[4651]: I1011 05:07:20.951955 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 11 05:07:20 crc kubenswrapper[4651]: I1011 05:07:20.958331 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk" event={"ID":"4fb44cbf-367e-4d0e-95f6-7411e5b76817","Type":"ContainerStarted","Data":"7704156b0cbfa5e6bfb0dc4ce81d0ef1948d4485444be7b2fc4b5a3768746120"} Oct 11 05:07:20 crc kubenswrapper[4651]: I1011 05:07:20.958555 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.012409 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk" podStartSLOduration=9.012381293 podStartE2EDuration="9.012381293s" podCreationTimestamp="2025-10-11 05:07:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:07:20.998813957 +0000 UTC m=+961.895046813" watchObservedRunningTime="2025-10-11 05:07:21.012381293 +0000 UTC m=+961.908614129" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.212103 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.302358 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-mkkvr"] Oct 11 05:07:21 crc kubenswrapper[4651]: E1011 05:07:21.302710 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f298166-9707-4e69-89fd-ac9bfc23080b" containerName="ovn-config" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.302724 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f298166-9707-4e69-89fd-ac9bfc23080b" containerName="ovn-config" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.302920 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f298166-9707-4e69-89fd-ac9bfc23080b" containerName="ovn-config" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.303406 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-mkkvr" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.329372 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-mkkvr"] Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.397241 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-bbb7v"] Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.398263 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-bbb7v" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.439165 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7ltd\" (UniqueName: \"kubernetes.io/projected/e88f1ae5-9219-4c23-b6e2-a115005a1011-kube-api-access-z7ltd\") pod \"cinder-db-create-mkkvr\" (UID: \"e88f1ae5-9219-4c23-b6e2-a115005a1011\") " pod="openstack/cinder-db-create-mkkvr" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.465655 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-bbb7v"] Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.510087 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-n4zlv"] Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.511420 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-n4zlv" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.530122 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-n4zlv"] Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.548207 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7ltd\" (UniqueName: \"kubernetes.io/projected/e88f1ae5-9219-4c23-b6e2-a115005a1011-kube-api-access-z7ltd\") pod \"cinder-db-create-mkkvr\" (UID: \"e88f1ae5-9219-4c23-b6e2-a115005a1011\") " pod="openstack/cinder-db-create-mkkvr" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.548254 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfv4f\" (UniqueName: \"kubernetes.io/projected/d8996cb1-d92c-4ec2-96b3-0aa6f643c3da-kube-api-access-qfv4f\") pod \"barbican-db-create-bbb7v\" (UID: \"d8996cb1-d92c-4ec2-96b3-0aa6f643c3da\") " pod="openstack/barbican-db-create-bbb7v" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.567181 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7ltd\" (UniqueName: \"kubernetes.io/projected/e88f1ae5-9219-4c23-b6e2-a115005a1011-kube-api-access-z7ltd\") pod \"cinder-db-create-mkkvr\" (UID: \"e88f1ae5-9219-4c23-b6e2-a115005a1011\") " pod="openstack/cinder-db-create-mkkvr" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.583166 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-ktc4s"] Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.584228 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-ktc4s" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.587347 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.587597 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-2zq9v" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.587932 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.588880 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.593255 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-ktc4s"] Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.630271 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-mkkvr" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.650584 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfv4f\" (UniqueName: \"kubernetes.io/projected/d8996cb1-d92c-4ec2-96b3-0aa6f643c3da-kube-api-access-qfv4f\") pod \"barbican-db-create-bbb7v\" (UID: \"d8996cb1-d92c-4ec2-96b3-0aa6f643c3da\") " pod="openstack/barbican-db-create-bbb7v" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.650647 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d56ql\" (UniqueName: \"kubernetes.io/projected/f0c34b4f-c8ab-4940-9c75-82d4b4e6988c-kube-api-access-d56ql\") pod \"keystone-db-sync-ktc4s\" (UID: \"f0c34b4f-c8ab-4940-9c75-82d4b4e6988c\") " pod="openstack/keystone-db-sync-ktc4s" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.650679 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0c34b4f-c8ab-4940-9c75-82d4b4e6988c-combined-ca-bundle\") pod \"keystone-db-sync-ktc4s\" (UID: \"f0c34b4f-c8ab-4940-9c75-82d4b4e6988c\") " pod="openstack/keystone-db-sync-ktc4s" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.650877 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nch2s\" (UniqueName: \"kubernetes.io/projected/a968fd78-9f70-4494-8d24-604cf4a4d8a1-kube-api-access-nch2s\") pod \"neutron-db-create-n4zlv\" (UID: \"a968fd78-9f70-4494-8d24-604cf4a4d8a1\") " pod="openstack/neutron-db-create-n4zlv" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.651002 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0c34b4f-c8ab-4940-9c75-82d4b4e6988c-config-data\") pod \"keystone-db-sync-ktc4s\" (UID: \"f0c34b4f-c8ab-4940-9c75-82d4b4e6988c\") " pod="openstack/keystone-db-sync-ktc4s" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.674356 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfv4f\" (UniqueName: \"kubernetes.io/projected/d8996cb1-d92c-4ec2-96b3-0aa6f643c3da-kube-api-access-qfv4f\") pod \"barbican-db-create-bbb7v\" (UID: \"d8996cb1-d92c-4ec2-96b3-0aa6f643c3da\") " pod="openstack/barbican-db-create-bbb7v" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.714490 4651 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openstack/barbican-db-create-bbb7v" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.752160 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d56ql\" (UniqueName: \"kubernetes.io/projected/f0c34b4f-c8ab-4940-9c75-82d4b4e6988c-kube-api-access-d56ql\") pod \"keystone-db-sync-ktc4s\" (UID: \"f0c34b4f-c8ab-4940-9c75-82d4b4e6988c\") " pod="openstack/keystone-db-sync-ktc4s" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.752206 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0c34b4f-c8ab-4940-9c75-82d4b4e6988c-combined-ca-bundle\") pod \"keystone-db-sync-ktc4s\" (UID: \"f0c34b4f-c8ab-4940-9c75-82d4b4e6988c\") " pod="openstack/keystone-db-sync-ktc4s" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.752248 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nch2s\" (UniqueName: \"kubernetes.io/projected/a968fd78-9f70-4494-8d24-604cf4a4d8a1-kube-api-access-nch2s\") pod \"neutron-db-create-n4zlv\" (UID: \"a968fd78-9f70-4494-8d24-604cf4a4d8a1\") " pod="openstack/neutron-db-create-n4zlv" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.752280 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0c34b4f-c8ab-4940-9c75-82d4b4e6988c-config-data\") pod \"keystone-db-sync-ktc4s\" (UID: \"f0c34b4f-c8ab-4940-9c75-82d4b4e6988c\") " pod="openstack/keystone-db-sync-ktc4s" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.758395 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0c34b4f-c8ab-4940-9c75-82d4b4e6988c-config-data\") pod \"keystone-db-sync-ktc4s\" (UID: \"f0c34b4f-c8ab-4940-9c75-82d4b4e6988c\") " pod="openstack/keystone-db-sync-ktc4s" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.760619 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0c34b4f-c8ab-4940-9c75-82d4b4e6988c-combined-ca-bundle\") pod \"keystone-db-sync-ktc4s\" (UID: \"f0c34b4f-c8ab-4940-9c75-82d4b4e6988c\") " pod="openstack/keystone-db-sync-ktc4s" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.772099 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nch2s\" (UniqueName: \"kubernetes.io/projected/a968fd78-9f70-4494-8d24-604cf4a4d8a1-kube-api-access-nch2s\") pod \"neutron-db-create-n4zlv\" (UID: \"a968fd78-9f70-4494-8d24-604cf4a4d8a1\") " pod="openstack/neutron-db-create-n4zlv" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.773358 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d56ql\" (UniqueName: \"kubernetes.io/projected/f0c34b4f-c8ab-4940-9c75-82d4b4e6988c-kube-api-access-d56ql\") pod \"keystone-db-sync-ktc4s\" (UID: \"f0c34b4f-c8ab-4940-9c75-82d4b4e6988c\") " pod="openstack/keystone-db-sync-ktc4s" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.828609 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-n4zlv" Oct 11 05:07:21 crc kubenswrapper[4651]: I1011 05:07:21.938031 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-ktc4s" Oct 11 05:07:22 crc kubenswrapper[4651]: I1011 05:07:22.162398 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-mkkvr"] Oct 11 05:07:22 crc kubenswrapper[4651]: W1011 05:07:22.169731 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode88f1ae5_9219_4c23_b6e2_a115005a1011.slice/crio-c5677e3a9a9becda0316d7729a1a55b4386c9618cb63079957834f0e169f7731 WatchSource:0}: Error finding container c5677e3a9a9becda0316d7729a1a55b4386c9618cb63079957834f0e169f7731: Status 404 returned error can't find the container with id c5677e3a9a9becda0316d7729a1a55b4386c9618cb63079957834f0e169f7731 Oct 11 05:07:22 crc kubenswrapper[4651]: I1011 05:07:22.213604 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-ktc4s"] Oct 11 05:07:22 crc kubenswrapper[4651]: W1011 05:07:22.217136 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf0c34b4f_c8ab_4940_9c75_82d4b4e6988c.slice/crio-92573340dadd8834c8e8038c0c67b21c159ad2a5598c699e74f7dfe2e949f87c WatchSource:0}: Error finding container 92573340dadd8834c8e8038c0c67b21c159ad2a5598c699e74f7dfe2e949f87c: Status 404 returned error can't find the container with id 92573340dadd8834c8e8038c0c67b21c159ad2a5598c699e74f7dfe2e949f87c Oct 11 05:07:22 crc kubenswrapper[4651]: I1011 05:07:22.222458 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-bbb7v"] Oct 11 05:07:22 crc kubenswrapper[4651]: W1011 05:07:22.249398 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd8996cb1_d92c_4ec2_96b3_0aa6f643c3da.slice/crio-006883daad8892695c0adfc5fa7d241005a1c054f5b7f035db565892d6636a8e WatchSource:0}: Error finding container 006883daad8892695c0adfc5fa7d241005a1c054f5b7f035db565892d6636a8e: Status 404 returned error can't find the container with id 006883daad8892695c0adfc5fa7d241005a1c054f5b7f035db565892d6636a8e Oct 11 05:07:22 crc kubenswrapper[4651]: I1011 05:07:22.333068 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-n4zlv"] Oct 11 05:07:22 crc kubenswrapper[4651]: W1011 05:07:22.343013 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda968fd78_9f70_4494_8d24_604cf4a4d8a1.slice/crio-5271e519fc5b430ec7b298922a6ed9a3250b66da7863538827ba34ac0e8856ee WatchSource:0}: Error finding container 5271e519fc5b430ec7b298922a6ed9a3250b66da7863538827ba34ac0e8856ee: Status 404 returned error can't find the container with id 5271e519fc5b430ec7b298922a6ed9a3250b66da7863538827ba34ac0e8856ee Oct 11 05:07:22 crc kubenswrapper[4651]: I1011 05:07:22.974165 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-ktc4s" event={"ID":"f0c34b4f-c8ab-4940-9c75-82d4b4e6988c","Type":"ContainerStarted","Data":"92573340dadd8834c8e8038c0c67b21c159ad2a5598c699e74f7dfe2e949f87c"} Oct 11 05:07:22 crc kubenswrapper[4651]: I1011 05:07:22.976027 4651 generic.go:334] "Generic (PLEG): container finished" podID="e88f1ae5-9219-4c23-b6e2-a115005a1011" containerID="6431114d06a5d46b7d8f1f38cba284b6ee35a0e03e734502f422a31ba1a761ed" exitCode=0 Oct 11 05:07:22 crc kubenswrapper[4651]: I1011 05:07:22.976094 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
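The four W1011 manager.go:1169 warnings above share one pattern: each fires just after a SyncLoop UPDATE, when the cgroup watcher sees a new crio-<id> slice before the runtime has the container registered, so the lookup returns a 404; the matching ContainerStarted events moments later show every one of those containers came up anyway. A hedged Go sketch, written for this note rather than taken from the kubelet, that tallies such warnings per container ID when skimming a log (the regex is fitted to the line format above):

// watch404.go - count "can't find the container with id" warnings per
// container ID in a kubelet log read from stdin.
package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

func main() {
	re := regexp.MustCompile(`can't find the container with id ([0-9a-f]{64})`)
	counts := map[string]int{}
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024)
	for sc.Scan() {
		if m := re.FindStringSubmatch(sc.Text()); m != nil {
			counts[m[1]]++
		}
	}
	for id, n := range counts {
		fmt.Printf("%d  %s...\n", n, id[:12])
	}
}

A single hit per ID, as in this log, points to the benign startup race; repeated hits for the same ID would be worth a closer look at the runtime.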
pod="openstack/cinder-db-create-mkkvr" event={"ID":"e88f1ae5-9219-4c23-b6e2-a115005a1011","Type":"ContainerDied","Data":"6431114d06a5d46b7d8f1f38cba284b6ee35a0e03e734502f422a31ba1a761ed"} Oct 11 05:07:22 crc kubenswrapper[4651]: I1011 05:07:22.976119 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-mkkvr" event={"ID":"e88f1ae5-9219-4c23-b6e2-a115005a1011","Type":"ContainerStarted","Data":"c5677e3a9a9becda0316d7729a1a55b4386c9618cb63079957834f0e169f7731"} Oct 11 05:07:22 crc kubenswrapper[4651]: I1011 05:07:22.978210 4651 generic.go:334] "Generic (PLEG): container finished" podID="a968fd78-9f70-4494-8d24-604cf4a4d8a1" containerID="ca5b2fe537d7de541b37559141ad1fef14b5b5ca8227cbc0512f86b8dc0faaf0" exitCode=0 Oct 11 05:07:22 crc kubenswrapper[4651]: I1011 05:07:22.978274 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-n4zlv" event={"ID":"a968fd78-9f70-4494-8d24-604cf4a4d8a1","Type":"ContainerDied","Data":"ca5b2fe537d7de541b37559141ad1fef14b5b5ca8227cbc0512f86b8dc0faaf0"} Oct 11 05:07:22 crc kubenswrapper[4651]: I1011 05:07:22.978302 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-n4zlv" event={"ID":"a968fd78-9f70-4494-8d24-604cf4a4d8a1","Type":"ContainerStarted","Data":"5271e519fc5b430ec7b298922a6ed9a3250b66da7863538827ba34ac0e8856ee"} Oct 11 05:07:22 crc kubenswrapper[4651]: I1011 05:07:22.980793 4651 generic.go:334] "Generic (PLEG): container finished" podID="d8996cb1-d92c-4ec2-96b3-0aa6f643c3da" containerID="8d2ac2e46cd43ac79eb33b8368a757f4b45376885b8476503c15e0c05de24c2c" exitCode=0 Oct 11 05:07:22 crc kubenswrapper[4651]: I1011 05:07:22.980846 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-bbb7v" event={"ID":"d8996cb1-d92c-4ec2-96b3-0aa6f643c3da","Type":"ContainerDied","Data":"8d2ac2e46cd43ac79eb33b8368a757f4b45376885b8476503c15e0c05de24c2c"} Oct 11 05:07:22 crc kubenswrapper[4651]: I1011 05:07:22.980873 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-bbb7v" event={"ID":"d8996cb1-d92c-4ec2-96b3-0aa6f643c3da","Type":"ContainerStarted","Data":"006883daad8892695c0adfc5fa7d241005a1c054f5b7f035db565892d6636a8e"} Oct 11 05:07:24 crc kubenswrapper[4651]: I1011 05:07:24.484725 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-bbb7v" Oct 11 05:07:24 crc kubenswrapper[4651]: I1011 05:07:24.489217 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-mkkvr" Oct 11 05:07:24 crc kubenswrapper[4651]: I1011 05:07:24.495471 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-n4zlv" Oct 11 05:07:24 crc kubenswrapper[4651]: I1011 05:07:24.627742 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qfv4f\" (UniqueName: \"kubernetes.io/projected/d8996cb1-d92c-4ec2-96b3-0aa6f643c3da-kube-api-access-qfv4f\") pod \"d8996cb1-d92c-4ec2-96b3-0aa6f643c3da\" (UID: \"d8996cb1-d92c-4ec2-96b3-0aa6f643c3da\") " Oct 11 05:07:24 crc kubenswrapper[4651]: I1011 05:07:24.627978 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z7ltd\" (UniqueName: \"kubernetes.io/projected/e88f1ae5-9219-4c23-b6e2-a115005a1011-kube-api-access-z7ltd\") pod \"e88f1ae5-9219-4c23-b6e2-a115005a1011\" (UID: \"e88f1ae5-9219-4c23-b6e2-a115005a1011\") " Oct 11 05:07:24 crc kubenswrapper[4651]: I1011 05:07:24.628091 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nch2s\" (UniqueName: \"kubernetes.io/projected/a968fd78-9f70-4494-8d24-604cf4a4d8a1-kube-api-access-nch2s\") pod \"a968fd78-9f70-4494-8d24-604cf4a4d8a1\" (UID: \"a968fd78-9f70-4494-8d24-604cf4a4d8a1\") " Oct 11 05:07:24 crc kubenswrapper[4651]: I1011 05:07:24.635986 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e88f1ae5-9219-4c23-b6e2-a115005a1011-kube-api-access-z7ltd" (OuterVolumeSpecName: "kube-api-access-z7ltd") pod "e88f1ae5-9219-4c23-b6e2-a115005a1011" (UID: "e88f1ae5-9219-4c23-b6e2-a115005a1011"). InnerVolumeSpecName "kube-api-access-z7ltd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:07:24 crc kubenswrapper[4651]: I1011 05:07:24.636041 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8996cb1-d92c-4ec2-96b3-0aa6f643c3da-kube-api-access-qfv4f" (OuterVolumeSpecName: "kube-api-access-qfv4f") pod "d8996cb1-d92c-4ec2-96b3-0aa6f643c3da" (UID: "d8996cb1-d92c-4ec2-96b3-0aa6f643c3da"). InnerVolumeSpecName "kube-api-access-qfv4f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:07:24 crc kubenswrapper[4651]: I1011 05:07:24.636059 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a968fd78-9f70-4494-8d24-604cf4a4d8a1-kube-api-access-nch2s" (OuterVolumeSpecName: "kube-api-access-nch2s") pod "a968fd78-9f70-4494-8d24-604cf4a4d8a1" (UID: "a968fd78-9f70-4494-8d24-604cf4a4d8a1"). InnerVolumeSpecName "kube-api-access-nch2s". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:07:24 crc kubenswrapper[4651]: I1011 05:07:24.730008 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z7ltd\" (UniqueName: \"kubernetes.io/projected/e88f1ae5-9219-4c23-b6e2-a115005a1011-kube-api-access-z7ltd\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:24 crc kubenswrapper[4651]: I1011 05:07:24.730046 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nch2s\" (UniqueName: \"kubernetes.io/projected/a968fd78-9f70-4494-8d24-604cf4a4d8a1-kube-api-access-nch2s\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:24 crc kubenswrapper[4651]: I1011 05:07:24.730056 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qfv4f\" (UniqueName: \"kubernetes.io/projected/d8996cb1-d92c-4ec2-96b3-0aa6f643c3da-kube-api-access-qfv4f\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:24 crc kubenswrapper[4651]: I1011 05:07:24.996022 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-mkkvr" Oct 11 05:07:24 crc kubenswrapper[4651]: I1011 05:07:24.996225 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-mkkvr" event={"ID":"e88f1ae5-9219-4c23-b6e2-a115005a1011","Type":"ContainerDied","Data":"c5677e3a9a9becda0316d7729a1a55b4386c9618cb63079957834f0e169f7731"} Oct 11 05:07:24 crc kubenswrapper[4651]: I1011 05:07:24.997035 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c5677e3a9a9becda0316d7729a1a55b4386c9618cb63079957834f0e169f7731" Oct 11 05:07:24 crc kubenswrapper[4651]: I1011 05:07:24.999218 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-n4zlv" event={"ID":"a968fd78-9f70-4494-8d24-604cf4a4d8a1","Type":"ContainerDied","Data":"5271e519fc5b430ec7b298922a6ed9a3250b66da7863538827ba34ac0e8856ee"} Oct 11 05:07:24 crc kubenswrapper[4651]: I1011 05:07:24.999260 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5271e519fc5b430ec7b298922a6ed9a3250b66da7863538827ba34ac0e8856ee" Oct 11 05:07:24 crc kubenswrapper[4651]: I1011 05:07:24.999311 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-n4zlv" Oct 11 05:07:25 crc kubenswrapper[4651]: I1011 05:07:25.000835 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-bbb7v" event={"ID":"d8996cb1-d92c-4ec2-96b3-0aa6f643c3da","Type":"ContainerDied","Data":"006883daad8892695c0adfc5fa7d241005a1c054f5b7f035db565892d6636a8e"} Oct 11 05:07:25 crc kubenswrapper[4651]: I1011 05:07:25.000872 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="006883daad8892695c0adfc5fa7d241005a1c054f5b7f035db565892d6636a8e" Oct 11 05:07:25 crc kubenswrapper[4651]: I1011 05:07:25.000938 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-bbb7v" Oct 11 05:07:27 crc kubenswrapper[4651]: I1011 05:07:27.487096 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk" Oct 11 05:07:27 crc kubenswrapper[4651]: I1011 05:07:27.583450 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-zvr5f"] Oct 11 05:07:27 crc kubenswrapper[4651]: I1011 05:07:27.583706 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b8fbc5445-zvr5f" podUID="64f28073-6c52-4fe5-8474-f976cd26edc0" containerName="dnsmasq-dns" containerID="cri-o://f3a6047fe5e61fedfb482f252d75988656a6f622f04e5734a0b333c33ab8319c" gracePeriod=10 Oct 11 05:07:29 crc kubenswrapper[4651]: I1011 05:07:29.036837 4651 generic.go:334] "Generic (PLEG): container finished" podID="64f28073-6c52-4fe5-8474-f976cd26edc0" containerID="f3a6047fe5e61fedfb482f252d75988656a6f622f04e5734a0b333c33ab8319c" exitCode=0 Oct 11 05:07:29 crc kubenswrapper[4651]: I1011 05:07:29.036859 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-zvr5f" event={"ID":"64f28073-6c52-4fe5-8474-f976cd26edc0","Type":"ContainerDied","Data":"f3a6047fe5e61fedfb482f252d75988656a6f622f04e5734a0b333c33ab8319c"} Oct 11 05:07:30 crc kubenswrapper[4651]: I1011 05:07:30.046646 4651 generic.go:334] "Generic (PLEG): container finished" podID="d7e37fff-86fd-435f-b124-27f7c2afb74d" containerID="5063e393f084e925c50d94f5fe68ea874eea10559384d86483c21265911bee04" exitCode=0 Oct 11 05:07:30 crc kubenswrapper[4651]: I1011 05:07:30.046770 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-j5wrj" event={"ID":"d7e37fff-86fd-435f-b124-27f7c2afb74d","Type":"ContainerDied","Data":"5063e393f084e925c50d94f5fe68ea874eea10559384d86483c21265911bee04"} Oct 11 05:07:30 crc kubenswrapper[4651]: I1011 05:07:30.846718 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-zvr5f" Oct 11 05:07:30 crc kubenswrapper[4651]: I1011 05:07:30.944381 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/64f28073-6c52-4fe5-8474-f976cd26edc0-ovsdbserver-sb\") pod \"64f28073-6c52-4fe5-8474-f976cd26edc0\" (UID: \"64f28073-6c52-4fe5-8474-f976cd26edc0\") " Oct 11 05:07:30 crc kubenswrapper[4651]: I1011 05:07:30.945120 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64f28073-6c52-4fe5-8474-f976cd26edc0-config\") pod \"64f28073-6c52-4fe5-8474-f976cd26edc0\" (UID: \"64f28073-6c52-4fe5-8474-f976cd26edc0\") " Oct 11 05:07:30 crc kubenswrapper[4651]: I1011 05:07:30.945360 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/64f28073-6c52-4fe5-8474-f976cd26edc0-ovsdbserver-nb\") pod \"64f28073-6c52-4fe5-8474-f976cd26edc0\" (UID: \"64f28073-6c52-4fe5-8474-f976cd26edc0\") " Oct 11 05:07:30 crc kubenswrapper[4651]: I1011 05:07:30.945675 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9q9td\" (UniqueName: \"kubernetes.io/projected/64f28073-6c52-4fe5-8474-f976cd26edc0-kube-api-access-9q9td\") pod \"64f28073-6c52-4fe5-8474-f976cd26edc0\" (UID: \"64f28073-6c52-4fe5-8474-f976cd26edc0\") " Oct 11 05:07:30 crc kubenswrapper[4651]: I1011 05:07:30.945883 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64f28073-6c52-4fe5-8474-f976cd26edc0-dns-svc\") pod \"64f28073-6c52-4fe5-8474-f976cd26edc0\" (UID: \"64f28073-6c52-4fe5-8474-f976cd26edc0\") " Oct 11 05:07:30 crc kubenswrapper[4651]: I1011 05:07:30.950116 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64f28073-6c52-4fe5-8474-f976cd26edc0-kube-api-access-9q9td" (OuterVolumeSpecName: "kube-api-access-9q9td") pod "64f28073-6c52-4fe5-8474-f976cd26edc0" (UID: "64f28073-6c52-4fe5-8474-f976cd26edc0"). InnerVolumeSpecName "kube-api-access-9q9td". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:07:30 crc kubenswrapper[4651]: I1011 05:07:30.986088 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64f28073-6c52-4fe5-8474-f976cd26edc0-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "64f28073-6c52-4fe5-8474-f976cd26edc0" (UID: "64f28073-6c52-4fe5-8474-f976cd26edc0"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:07:30 crc kubenswrapper[4651]: I1011 05:07:30.990529 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64f28073-6c52-4fe5-8474-f976cd26edc0-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "64f28073-6c52-4fe5-8474-f976cd26edc0" (UID: "64f28073-6c52-4fe5-8474-f976cd26edc0"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.001778 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64f28073-6c52-4fe5-8474-f976cd26edc0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "64f28073-6c52-4fe5-8474-f976cd26edc0" (UID: "64f28073-6c52-4fe5-8474-f976cd26edc0"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.003187 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64f28073-6c52-4fe5-8474-f976cd26edc0-config" (OuterVolumeSpecName: "config") pod "64f28073-6c52-4fe5-8474-f976cd26edc0" (UID: "64f28073-6c52-4fe5-8474-f976cd26edc0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.047638 4651 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/64f28073-6c52-4fe5-8474-f976cd26edc0-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.047672 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64f28073-6c52-4fe5-8474-f976cd26edc0-config\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.047681 4651 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/64f28073-6c52-4fe5-8474-f976cd26edc0-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.047689 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9q9td\" (UniqueName: \"kubernetes.io/projected/64f28073-6c52-4fe5-8474-f976cd26edc0-kube-api-access-9q9td\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.047699 4651 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64f28073-6c52-4fe5-8474-f976cd26edc0-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.058688 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-ktc4s" event={"ID":"f0c34b4f-c8ab-4940-9c75-82d4b4e6988c","Type":"ContainerStarted","Data":"dc39a8e7d062d4ad68ccdd4434f8ebd500d40b930224b8075357dab447b1bd22"} Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.061703 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-zvr5f" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.061705 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-zvr5f" event={"ID":"64f28073-6c52-4fe5-8474-f976cd26edc0","Type":"ContainerDied","Data":"0faf20d50dc1b9a257edd1fa98a1dc72f33c4bf7fcbe1c8f909cdf6272dea9da"} Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.062115 4651 scope.go:117] "RemoveContainer" containerID="f3a6047fe5e61fedfb482f252d75988656a6f622f04e5734a0b333c33ab8319c" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.090492 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-ktc4s" podStartSLOduration=1.477153436 podStartE2EDuration="10.090461182s" podCreationTimestamp="2025-10-11 05:07:21 +0000 UTC" firstStartedPulling="2025-10-11 05:07:22.220007613 +0000 UTC m=+963.116240409" lastFinishedPulling="2025-10-11 05:07:30.833315319 +0000 UTC m=+971.729548155" observedRunningTime="2025-10-11 05:07:31.082897429 +0000 UTC m=+971.979130255" watchObservedRunningTime="2025-10-11 05:07:31.090461182 +0000 UTC m=+971.986694018" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.114174 4651 scope.go:117] "RemoveContainer" containerID="d7aa98064ec9b79c4b78e88a94d3b9f892700455b99c5e666b6087eed94f4fcb" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.119952 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-zvr5f"] Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.127497 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-zvr5f"] Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.383862 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-j5wrj" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.430632 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-5e94-account-create-qxkc6"] Oct 11 05:07:31 crc kubenswrapper[4651]: E1011 05:07:31.430984 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64f28073-6c52-4fe5-8474-f976cd26edc0" containerName="dnsmasq-dns" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.431002 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="64f28073-6c52-4fe5-8474-f976cd26edc0" containerName="dnsmasq-dns" Oct 11 05:07:31 crc kubenswrapper[4651]: E1011 05:07:31.431018 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7e37fff-86fd-435f-b124-27f7c2afb74d" containerName="glance-db-sync" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.431026 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7e37fff-86fd-435f-b124-27f7c2afb74d" containerName="glance-db-sync" Oct 11 05:07:31 crc kubenswrapper[4651]: E1011 05:07:31.431039 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e88f1ae5-9219-4c23-b6e2-a115005a1011" containerName="mariadb-database-create" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.431045 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="e88f1ae5-9219-4c23-b6e2-a115005a1011" containerName="mariadb-database-create" Oct 11 05:07:31 crc kubenswrapper[4651]: E1011 05:07:31.431060 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a968fd78-9f70-4494-8d24-604cf4a4d8a1" containerName="mariadb-database-create" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.431066 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="a968fd78-9f70-4494-8d24-604cf4a4d8a1" containerName="mariadb-database-create" Oct 11 05:07:31 crc kubenswrapper[4651]: E1011 05:07:31.431078 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64f28073-6c52-4fe5-8474-f976cd26edc0" containerName="init" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.431084 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="64f28073-6c52-4fe5-8474-f976cd26edc0" containerName="init" Oct 11 05:07:31 crc kubenswrapper[4651]: E1011 05:07:31.431097 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8996cb1-d92c-4ec2-96b3-0aa6f643c3da" containerName="mariadb-database-create" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.431104 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8996cb1-d92c-4ec2-96b3-0aa6f643c3da" containerName="mariadb-database-create" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.431264 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7e37fff-86fd-435f-b124-27f7c2afb74d" containerName="glance-db-sync" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.431281 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="e88f1ae5-9219-4c23-b6e2-a115005a1011" containerName="mariadb-database-create" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.431299 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="64f28073-6c52-4fe5-8474-f976cd26edc0" containerName="dnsmasq-dns" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.431314 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8996cb1-d92c-4ec2-96b3-0aa6f643c3da" containerName="mariadb-database-create" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.431327 4651 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="a968fd78-9f70-4494-8d24-604cf4a4d8a1" containerName="mariadb-database-create" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.431880 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-5e94-account-create-qxkc6" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.437373 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.443872 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-5e94-account-create-qxkc6"] Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.452582 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jcxlz\" (UniqueName: \"kubernetes.io/projected/d7e37fff-86fd-435f-b124-27f7c2afb74d-kube-api-access-jcxlz\") pod \"d7e37fff-86fd-435f-b124-27f7c2afb74d\" (UID: \"d7e37fff-86fd-435f-b124-27f7c2afb74d\") " Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.452713 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d7e37fff-86fd-435f-b124-27f7c2afb74d-db-sync-config-data\") pod \"d7e37fff-86fd-435f-b124-27f7c2afb74d\" (UID: \"d7e37fff-86fd-435f-b124-27f7c2afb74d\") " Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.452784 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7e37fff-86fd-435f-b124-27f7c2afb74d-config-data\") pod \"d7e37fff-86fd-435f-b124-27f7c2afb74d\" (UID: \"d7e37fff-86fd-435f-b124-27f7c2afb74d\") " Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.452881 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7e37fff-86fd-435f-b124-27f7c2afb74d-combined-ca-bundle\") pod \"d7e37fff-86fd-435f-b124-27f7c2afb74d\" (UID: \"d7e37fff-86fd-435f-b124-27f7c2afb74d\") " Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.458345 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7e37fff-86fd-435f-b124-27f7c2afb74d-kube-api-access-jcxlz" (OuterVolumeSpecName: "kube-api-access-jcxlz") pod "d7e37fff-86fd-435f-b124-27f7c2afb74d" (UID: "d7e37fff-86fd-435f-b124-27f7c2afb74d"). InnerVolumeSpecName "kube-api-access-jcxlz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.458439 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7e37fff-86fd-435f-b124-27f7c2afb74d-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "d7e37fff-86fd-435f-b124-27f7c2afb74d" (UID: "d7e37fff-86fd-435f-b124-27f7c2afb74d"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.476089 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7e37fff-86fd-435f-b124-27f7c2afb74d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d7e37fff-86fd-435f-b124-27f7c2afb74d" (UID: "d7e37fff-86fd-435f-b124-27f7c2afb74d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.499628 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7e37fff-86fd-435f-b124-27f7c2afb74d-config-data" (OuterVolumeSpecName: "config-data") pod "d7e37fff-86fd-435f-b124-27f7c2afb74d" (UID: "d7e37fff-86fd-435f-b124-27f7c2afb74d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.522149 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-921e-account-create-p5bgn"] Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.523685 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-921e-account-create-p5bgn" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.527044 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.531508 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-921e-account-create-p5bgn"] Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.555301 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7n7d\" (UniqueName: \"kubernetes.io/projected/8a197d94-8ca4-4992-9bb9-f6c6d42a1351-kube-api-access-m7n7d\") pod \"barbican-5e94-account-create-qxkc6\" (UID: \"8a197d94-8ca4-4992-9bb9-f6c6d42a1351\") " pod="openstack/barbican-5e94-account-create-qxkc6" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.555407 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7e37fff-86fd-435f-b124-27f7c2afb74d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.555431 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jcxlz\" (UniqueName: \"kubernetes.io/projected/d7e37fff-86fd-435f-b124-27f7c2afb74d-kube-api-access-jcxlz\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.555445 4651 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d7e37fff-86fd-435f-b124-27f7c2afb74d-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.555458 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7e37fff-86fd-435f-b124-27f7c2afb74d-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.657333 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7n7d\" (UniqueName: \"kubernetes.io/projected/8a197d94-8ca4-4992-9bb9-f6c6d42a1351-kube-api-access-m7n7d\") pod \"barbican-5e94-account-create-qxkc6\" (UID: \"8a197d94-8ca4-4992-9bb9-f6c6d42a1351\") " pod="openstack/barbican-5e94-account-create-qxkc6" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.657736 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpvql\" (UniqueName: \"kubernetes.io/projected/3ad2c489-03ce-4435-8b12-4a7f77d12c95-kube-api-access-fpvql\") pod \"cinder-921e-account-create-p5bgn\" (UID: \"3ad2c489-03ce-4435-8b12-4a7f77d12c95\") " pod="openstack/cinder-921e-account-create-p5bgn" Oct 11 05:07:31 crc 
kubenswrapper[4651]: I1011 05:07:31.677017 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7n7d\" (UniqueName: \"kubernetes.io/projected/8a197d94-8ca4-4992-9bb9-f6c6d42a1351-kube-api-access-m7n7d\") pod \"barbican-5e94-account-create-qxkc6\" (UID: \"8a197d94-8ca4-4992-9bb9-f6c6d42a1351\") " pod="openstack/barbican-5e94-account-create-qxkc6" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.723285 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-2878-account-create-bqv6m"] Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.724418 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-2878-account-create-bqv6m" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.727173 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.739643 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-2878-account-create-bqv6m"] Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.756504 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-5e94-account-create-qxkc6" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.759117 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpvql\" (UniqueName: \"kubernetes.io/projected/3ad2c489-03ce-4435-8b12-4a7f77d12c95-kube-api-access-fpvql\") pod \"cinder-921e-account-create-p5bgn\" (UID: \"3ad2c489-03ce-4435-8b12-4a7f77d12c95\") " pod="openstack/cinder-921e-account-create-p5bgn" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.780504 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpvql\" (UniqueName: \"kubernetes.io/projected/3ad2c489-03ce-4435-8b12-4a7f77d12c95-kube-api-access-fpvql\") pod \"cinder-921e-account-create-p5bgn\" (UID: \"3ad2c489-03ce-4435-8b12-4a7f77d12c95\") " pod="openstack/cinder-921e-account-create-p5bgn" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.849161 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-921e-account-create-p5bgn" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.860347 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvvh9\" (UniqueName: \"kubernetes.io/projected/286d8877-76aa-4258-a93d-da719dda8143-kube-api-access-kvvh9\") pod \"neutron-2878-account-create-bqv6m\" (UID: \"286d8877-76aa-4258-a93d-da719dda8143\") " pod="openstack/neutron-2878-account-create-bqv6m" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.883574 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64f28073-6c52-4fe5-8474-f976cd26edc0" path="/var/lib/kubelet/pods/64f28073-6c52-4fe5-8474-f976cd26edc0/volumes" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.961701 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvvh9\" (UniqueName: \"kubernetes.io/projected/286d8877-76aa-4258-a93d-da719dda8143-kube-api-access-kvvh9\") pod \"neutron-2878-account-create-bqv6m\" (UID: \"286d8877-76aa-4258-a93d-da719dda8143\") " pod="openstack/neutron-2878-account-create-bqv6m" Oct 11 05:07:31 crc kubenswrapper[4651]: I1011 05:07:31.979648 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvvh9\" (UniqueName: \"kubernetes.io/projected/286d8877-76aa-4258-a93d-da719dda8143-kube-api-access-kvvh9\") pod \"neutron-2878-account-create-bqv6m\" (UID: \"286d8877-76aa-4258-a93d-da719dda8143\") " pod="openstack/neutron-2878-account-create-bqv6m" Oct 11 05:07:32 crc kubenswrapper[4651]: I1011 05:07:32.093904 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-j5wrj" Oct 11 05:07:32 crc kubenswrapper[4651]: I1011 05:07:32.094062 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-j5wrj" event={"ID":"d7e37fff-86fd-435f-b124-27f7c2afb74d","Type":"ContainerDied","Data":"be55398c99da0ad1dcb630915ff5194cf820f690396a8718af05e6dd1ac74902"} Oct 11 05:07:32 crc kubenswrapper[4651]: I1011 05:07:32.094355 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="be55398c99da0ad1dcb630915ff5194cf820f690396a8718af05e6dd1ac74902" Oct 11 05:07:32 crc kubenswrapper[4651]: I1011 05:07:32.145302 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-2878-account-create-bqv6m" Oct 11 05:07:32 crc kubenswrapper[4651]: I1011 05:07:32.221548 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-5e94-account-create-qxkc6"] Oct 11 05:07:32 crc kubenswrapper[4651]: I1011 05:07:32.322511 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-921e-account-create-p5bgn"] Oct 11 05:07:32 crc kubenswrapper[4651]: I1011 05:07:32.481702 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-895cf5cf-n27b7"] Oct 11 05:07:32 crc kubenswrapper[4651]: I1011 05:07:32.483255 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-895cf5cf-n27b7" Oct 11 05:07:32 crc kubenswrapper[4651]: I1011 05:07:32.498120 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-2878-account-create-bqv6m"] Oct 11 05:07:32 crc kubenswrapper[4651]: I1011 05:07:32.518005 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-895cf5cf-n27b7"] Oct 11 05:07:32 crc kubenswrapper[4651]: I1011 05:07:32.574552 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-dns-svc\") pod \"dnsmasq-dns-895cf5cf-n27b7\" (UID: \"d6e586e3-8d32-4116-a47d-fd410e4c2c2d\") " pod="openstack/dnsmasq-dns-895cf5cf-n27b7" Oct 11 05:07:32 crc kubenswrapper[4651]: I1011 05:07:32.574654 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-ovsdbserver-nb\") pod \"dnsmasq-dns-895cf5cf-n27b7\" (UID: \"d6e586e3-8d32-4116-a47d-fd410e4c2c2d\") " pod="openstack/dnsmasq-dns-895cf5cf-n27b7" Oct 11 05:07:32 crc kubenswrapper[4651]: I1011 05:07:32.574906 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvhmz\" (UniqueName: \"kubernetes.io/projected/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-kube-api-access-gvhmz\") pod \"dnsmasq-dns-895cf5cf-n27b7\" (UID: \"d6e586e3-8d32-4116-a47d-fd410e4c2c2d\") " pod="openstack/dnsmasq-dns-895cf5cf-n27b7" Oct 11 05:07:32 crc kubenswrapper[4651]: I1011 05:07:32.575083 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-ovsdbserver-sb\") pod \"dnsmasq-dns-895cf5cf-n27b7\" (UID: \"d6e586e3-8d32-4116-a47d-fd410e4c2c2d\") " pod="openstack/dnsmasq-dns-895cf5cf-n27b7" Oct 11 05:07:32 crc kubenswrapper[4651]: I1011 05:07:32.575188 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-dns-swift-storage-0\") pod \"dnsmasq-dns-895cf5cf-n27b7\" (UID: \"d6e586e3-8d32-4116-a47d-fd410e4c2c2d\") " pod="openstack/dnsmasq-dns-895cf5cf-n27b7" Oct 11 05:07:32 crc kubenswrapper[4651]: I1011 05:07:32.575246 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-config\") pod \"dnsmasq-dns-895cf5cf-n27b7\" (UID: \"d6e586e3-8d32-4116-a47d-fd410e4c2c2d\") " pod="openstack/dnsmasq-dns-895cf5cf-n27b7" Oct 11 05:07:32 crc kubenswrapper[4651]: I1011 05:07:32.676298 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-ovsdbserver-sb\") pod \"dnsmasq-dns-895cf5cf-n27b7\" (UID: \"d6e586e3-8d32-4116-a47d-fd410e4c2c2d\") " pod="openstack/dnsmasq-dns-895cf5cf-n27b7" Oct 11 05:07:32 crc kubenswrapper[4651]: I1011 05:07:32.676365 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-dns-swift-storage-0\") pod \"dnsmasq-dns-895cf5cf-n27b7\" (UID: \"d6e586e3-8d32-4116-a47d-fd410e4c2c2d\") " 
pod="openstack/dnsmasq-dns-895cf5cf-n27b7" Oct 11 05:07:32 crc kubenswrapper[4651]: I1011 05:07:32.676393 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-config\") pod \"dnsmasq-dns-895cf5cf-n27b7\" (UID: \"d6e586e3-8d32-4116-a47d-fd410e4c2c2d\") " pod="openstack/dnsmasq-dns-895cf5cf-n27b7" Oct 11 05:07:32 crc kubenswrapper[4651]: I1011 05:07:32.676415 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-dns-svc\") pod \"dnsmasq-dns-895cf5cf-n27b7\" (UID: \"d6e586e3-8d32-4116-a47d-fd410e4c2c2d\") " pod="openstack/dnsmasq-dns-895cf5cf-n27b7" Oct 11 05:07:32 crc kubenswrapper[4651]: I1011 05:07:32.676439 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-ovsdbserver-nb\") pod \"dnsmasq-dns-895cf5cf-n27b7\" (UID: \"d6e586e3-8d32-4116-a47d-fd410e4c2c2d\") " pod="openstack/dnsmasq-dns-895cf5cf-n27b7" Oct 11 05:07:32 crc kubenswrapper[4651]: I1011 05:07:32.676490 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvhmz\" (UniqueName: \"kubernetes.io/projected/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-kube-api-access-gvhmz\") pod \"dnsmasq-dns-895cf5cf-n27b7\" (UID: \"d6e586e3-8d32-4116-a47d-fd410e4c2c2d\") " pod="openstack/dnsmasq-dns-895cf5cf-n27b7" Oct 11 05:07:32 crc kubenswrapper[4651]: I1011 05:07:32.677417 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-dns-swift-storage-0\") pod \"dnsmasq-dns-895cf5cf-n27b7\" (UID: \"d6e586e3-8d32-4116-a47d-fd410e4c2c2d\") " pod="openstack/dnsmasq-dns-895cf5cf-n27b7" Oct 11 05:07:32 crc kubenswrapper[4651]: I1011 05:07:32.677476 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-ovsdbserver-nb\") pod \"dnsmasq-dns-895cf5cf-n27b7\" (UID: \"d6e586e3-8d32-4116-a47d-fd410e4c2c2d\") " pod="openstack/dnsmasq-dns-895cf5cf-n27b7" Oct 11 05:07:32 crc kubenswrapper[4651]: I1011 05:07:32.677500 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-config\") pod \"dnsmasq-dns-895cf5cf-n27b7\" (UID: \"d6e586e3-8d32-4116-a47d-fd410e4c2c2d\") " pod="openstack/dnsmasq-dns-895cf5cf-n27b7" Oct 11 05:07:32 crc kubenswrapper[4651]: I1011 05:07:32.677610 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-dns-svc\") pod \"dnsmasq-dns-895cf5cf-n27b7\" (UID: \"d6e586e3-8d32-4116-a47d-fd410e4c2c2d\") " pod="openstack/dnsmasq-dns-895cf5cf-n27b7" Oct 11 05:07:32 crc kubenswrapper[4651]: I1011 05:07:32.678443 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-ovsdbserver-sb\") pod \"dnsmasq-dns-895cf5cf-n27b7\" (UID: \"d6e586e3-8d32-4116-a47d-fd410e4c2c2d\") " pod="openstack/dnsmasq-dns-895cf5cf-n27b7" Oct 11 05:07:32 crc kubenswrapper[4651]: I1011 05:07:32.695494 4651 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-gvhmz\" (UniqueName: \"kubernetes.io/projected/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-kube-api-access-gvhmz\") pod \"dnsmasq-dns-895cf5cf-n27b7\" (UID: \"d6e586e3-8d32-4116-a47d-fd410e4c2c2d\") " pod="openstack/dnsmasq-dns-895cf5cf-n27b7" Oct 11 05:07:32 crc kubenswrapper[4651]: I1011 05:07:32.836658 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-895cf5cf-n27b7" Oct 11 05:07:33 crc kubenswrapper[4651]: I1011 05:07:33.102992 4651 generic.go:334] "Generic (PLEG): container finished" podID="8a197d94-8ca4-4992-9bb9-f6c6d42a1351" containerID="ec38fe3c55d9354a2bef7aab13271702e6dca918bb369a2bd1f492631bcf64e3" exitCode=0 Oct 11 05:07:33 crc kubenswrapper[4651]: I1011 05:07:33.103069 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-5e94-account-create-qxkc6" event={"ID":"8a197d94-8ca4-4992-9bb9-f6c6d42a1351","Type":"ContainerDied","Data":"ec38fe3c55d9354a2bef7aab13271702e6dca918bb369a2bd1f492631bcf64e3"} Oct 11 05:07:33 crc kubenswrapper[4651]: I1011 05:07:33.103409 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-5e94-account-create-qxkc6" event={"ID":"8a197d94-8ca4-4992-9bb9-f6c6d42a1351","Type":"ContainerStarted","Data":"b2014be329a5ba8644eb71705a55ee35b679b1cdd8ddddb8f91215385688c766"} Oct 11 05:07:33 crc kubenswrapper[4651]: I1011 05:07:33.105787 4651 generic.go:334] "Generic (PLEG): container finished" podID="3ad2c489-03ce-4435-8b12-4a7f77d12c95" containerID="032cd564ff4bc5da969f0967080218c4a351342a2e6fab664f11896488b435fa" exitCode=0 Oct 11 05:07:33 crc kubenswrapper[4651]: I1011 05:07:33.105859 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-921e-account-create-p5bgn" event={"ID":"3ad2c489-03ce-4435-8b12-4a7f77d12c95","Type":"ContainerDied","Data":"032cd564ff4bc5da969f0967080218c4a351342a2e6fab664f11896488b435fa"} Oct 11 05:07:33 crc kubenswrapper[4651]: I1011 05:07:33.105893 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-921e-account-create-p5bgn" event={"ID":"3ad2c489-03ce-4435-8b12-4a7f77d12c95","Type":"ContainerStarted","Data":"cc9e964d11e3634282a8c1868cc052500635322da34667dbb022283594fa85e6"} Oct 11 05:07:33 crc kubenswrapper[4651]: I1011 05:07:33.107759 4651 generic.go:334] "Generic (PLEG): container finished" podID="286d8877-76aa-4258-a93d-da719dda8143" containerID="45d0d53f261a983c6d235cb866663f1dd095110892989e5cfa4d5f08e9ebd055" exitCode=0 Oct 11 05:07:33 crc kubenswrapper[4651]: I1011 05:07:33.107797 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-2878-account-create-bqv6m" event={"ID":"286d8877-76aa-4258-a93d-da719dda8143","Type":"ContainerDied","Data":"45d0d53f261a983c6d235cb866663f1dd095110892989e5cfa4d5f08e9ebd055"} Oct 11 05:07:33 crc kubenswrapper[4651]: I1011 05:07:33.107838 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-2878-account-create-bqv6m" event={"ID":"286d8877-76aa-4258-a93d-da719dda8143","Type":"ContainerStarted","Data":"eb52b66e0930ec2b02accbf74658b498722694d6063906a1dd4cf96434a09575"} Oct 11 05:07:33 crc kubenswrapper[4651]: I1011 05:07:33.245665 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-895cf5cf-n27b7"] Oct 11 05:07:34 crc kubenswrapper[4651]: I1011 05:07:34.120607 4651 generic.go:334] "Generic (PLEG): container finished" podID="d6e586e3-8d32-4116-a47d-fd410e4c2c2d" containerID="12b891089e4c4450c642a0b7730af16d6077ac2bea6a07c21e3047862940ec1d" exitCode=0 
Oct 11 05:07:34 crc kubenswrapper[4651]: I1011 05:07:34.120701 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-895cf5cf-n27b7" event={"ID":"d6e586e3-8d32-4116-a47d-fd410e4c2c2d","Type":"ContainerDied","Data":"12b891089e4c4450c642a0b7730af16d6077ac2bea6a07c21e3047862940ec1d"} Oct 11 05:07:34 crc kubenswrapper[4651]: I1011 05:07:34.121239 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-895cf5cf-n27b7" event={"ID":"d6e586e3-8d32-4116-a47d-fd410e4c2c2d","Type":"ContainerStarted","Data":"ad064dcd664902900a58fe4898c9ef18cf40b5620e7ebe046021642abea7d426"} Oct 11 05:07:34 crc kubenswrapper[4651]: I1011 05:07:34.123270 4651 generic.go:334] "Generic (PLEG): container finished" podID="f0c34b4f-c8ab-4940-9c75-82d4b4e6988c" containerID="dc39a8e7d062d4ad68ccdd4434f8ebd500d40b930224b8075357dab447b1bd22" exitCode=0 Oct 11 05:07:34 crc kubenswrapper[4651]: I1011 05:07:34.123350 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-ktc4s" event={"ID":"f0c34b4f-c8ab-4940-9c75-82d4b4e6988c","Type":"ContainerDied","Data":"dc39a8e7d062d4ad68ccdd4434f8ebd500d40b930224b8075357dab447b1bd22"} Oct 11 05:07:34 crc kubenswrapper[4651]: I1011 05:07:34.582511 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-921e-account-create-p5bgn" Oct 11 05:07:34 crc kubenswrapper[4651]: I1011 05:07:34.596214 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-5e94-account-create-qxkc6" Oct 11 05:07:34 crc kubenswrapper[4651]: I1011 05:07:34.622157 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fpvql\" (UniqueName: \"kubernetes.io/projected/3ad2c489-03ce-4435-8b12-4a7f77d12c95-kube-api-access-fpvql\") pod \"3ad2c489-03ce-4435-8b12-4a7f77d12c95\" (UID: \"3ad2c489-03ce-4435-8b12-4a7f77d12c95\") " Oct 11 05:07:34 crc kubenswrapper[4651]: I1011 05:07:34.627411 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-2878-account-create-bqv6m" Oct 11 05:07:34 crc kubenswrapper[4651]: I1011 05:07:34.629320 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ad2c489-03ce-4435-8b12-4a7f77d12c95-kube-api-access-fpvql" (OuterVolumeSpecName: "kube-api-access-fpvql") pod "3ad2c489-03ce-4435-8b12-4a7f77d12c95" (UID: "3ad2c489-03ce-4435-8b12-4a7f77d12c95"). InnerVolumeSpecName "kube-api-access-fpvql". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:07:34 crc kubenswrapper[4651]: I1011 05:07:34.724287 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m7n7d\" (UniqueName: \"kubernetes.io/projected/8a197d94-8ca4-4992-9bb9-f6c6d42a1351-kube-api-access-m7n7d\") pod \"8a197d94-8ca4-4992-9bb9-f6c6d42a1351\" (UID: \"8a197d94-8ca4-4992-9bb9-f6c6d42a1351\") " Oct 11 05:07:34 crc kubenswrapper[4651]: I1011 05:07:34.724472 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvvh9\" (UniqueName: \"kubernetes.io/projected/286d8877-76aa-4258-a93d-da719dda8143-kube-api-access-kvvh9\") pod \"286d8877-76aa-4258-a93d-da719dda8143\" (UID: \"286d8877-76aa-4258-a93d-da719dda8143\") " Oct 11 05:07:34 crc kubenswrapper[4651]: I1011 05:07:34.724789 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fpvql\" (UniqueName: \"kubernetes.io/projected/3ad2c489-03ce-4435-8b12-4a7f77d12c95-kube-api-access-fpvql\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:34 crc kubenswrapper[4651]: I1011 05:07:34.727178 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a197d94-8ca4-4992-9bb9-f6c6d42a1351-kube-api-access-m7n7d" (OuterVolumeSpecName: "kube-api-access-m7n7d") pod "8a197d94-8ca4-4992-9bb9-f6c6d42a1351" (UID: "8a197d94-8ca4-4992-9bb9-f6c6d42a1351"). InnerVolumeSpecName "kube-api-access-m7n7d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:07:34 crc kubenswrapper[4651]: I1011 05:07:34.728796 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/286d8877-76aa-4258-a93d-da719dda8143-kube-api-access-kvvh9" (OuterVolumeSpecName: "kube-api-access-kvvh9") pod "286d8877-76aa-4258-a93d-da719dda8143" (UID: "286d8877-76aa-4258-a93d-da719dda8143"). InnerVolumeSpecName "kube-api-access-kvvh9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:07:34 crc kubenswrapper[4651]: I1011 05:07:34.826718 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m7n7d\" (UniqueName: \"kubernetes.io/projected/8a197d94-8ca4-4992-9bb9-f6c6d42a1351-kube-api-access-m7n7d\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:34 crc kubenswrapper[4651]: I1011 05:07:34.826763 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvvh9\" (UniqueName: \"kubernetes.io/projected/286d8877-76aa-4258-a93d-da719dda8143-kube-api-access-kvvh9\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:35 crc kubenswrapper[4651]: I1011 05:07:35.132645 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-5e94-account-create-qxkc6" event={"ID":"8a197d94-8ca4-4992-9bb9-f6c6d42a1351","Type":"ContainerDied","Data":"b2014be329a5ba8644eb71705a55ee35b679b1cdd8ddddb8f91215385688c766"} Oct 11 05:07:35 crc kubenswrapper[4651]: I1011 05:07:35.132686 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b2014be329a5ba8644eb71705a55ee35b679b1cdd8ddddb8f91215385688c766" Oct 11 05:07:35 crc kubenswrapper[4651]: I1011 05:07:35.132704 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-5e94-account-create-qxkc6" Oct 11 05:07:35 crc kubenswrapper[4651]: I1011 05:07:35.134932 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-895cf5cf-n27b7" event={"ID":"d6e586e3-8d32-4116-a47d-fd410e4c2c2d","Type":"ContainerStarted","Data":"d018b4446595cff8a0ec737a2c8414a79b19c0db7a4196134a65a47cb59b50b0"} Oct 11 05:07:35 crc kubenswrapper[4651]: I1011 05:07:35.135095 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-895cf5cf-n27b7" Oct 11 05:07:35 crc kubenswrapper[4651]: I1011 05:07:35.138096 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-921e-account-create-p5bgn" event={"ID":"3ad2c489-03ce-4435-8b12-4a7f77d12c95","Type":"ContainerDied","Data":"cc9e964d11e3634282a8c1868cc052500635322da34667dbb022283594fa85e6"} Oct 11 05:07:35 crc kubenswrapper[4651]: I1011 05:07:35.138121 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-921e-account-create-p5bgn" Oct 11 05:07:35 crc kubenswrapper[4651]: I1011 05:07:35.138138 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cc9e964d11e3634282a8c1868cc052500635322da34667dbb022283594fa85e6" Oct 11 05:07:35 crc kubenswrapper[4651]: I1011 05:07:35.139783 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-2878-account-create-bqv6m" event={"ID":"286d8877-76aa-4258-a93d-da719dda8143","Type":"ContainerDied","Data":"eb52b66e0930ec2b02accbf74658b498722694d6063906a1dd4cf96434a09575"} Oct 11 05:07:35 crc kubenswrapper[4651]: I1011 05:07:35.139809 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eb52b66e0930ec2b02accbf74658b498722694d6063906a1dd4cf96434a09575" Oct 11 05:07:35 crc kubenswrapper[4651]: I1011 05:07:35.139836 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-2878-account-create-bqv6m" Oct 11 05:07:35 crc kubenswrapper[4651]: I1011 05:07:35.157663 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-895cf5cf-n27b7" podStartSLOduration=3.157645686 podStartE2EDuration="3.157645686s" podCreationTimestamp="2025-10-11 05:07:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:07:35.153887941 +0000 UTC m=+976.050120747" watchObservedRunningTime="2025-10-11 05:07:35.157645686 +0000 UTC m=+976.053878482" Oct 11 05:07:35 crc kubenswrapper[4651]: I1011 05:07:35.484498 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-ktc4s" Oct 11 05:07:35 crc kubenswrapper[4651]: I1011 05:07:35.536989 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d56ql\" (UniqueName: \"kubernetes.io/projected/f0c34b4f-c8ab-4940-9c75-82d4b4e6988c-kube-api-access-d56ql\") pod \"f0c34b4f-c8ab-4940-9c75-82d4b4e6988c\" (UID: \"f0c34b4f-c8ab-4940-9c75-82d4b4e6988c\") " Oct 11 05:07:35 crc kubenswrapper[4651]: I1011 05:07:35.537066 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0c34b4f-c8ab-4940-9c75-82d4b4e6988c-combined-ca-bundle\") pod \"f0c34b4f-c8ab-4940-9c75-82d4b4e6988c\" (UID: \"f0c34b4f-c8ab-4940-9c75-82d4b4e6988c\") " Oct 11 05:07:35 crc kubenswrapper[4651]: I1011 05:07:35.537089 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0c34b4f-c8ab-4940-9c75-82d4b4e6988c-config-data\") pod \"f0c34b4f-c8ab-4940-9c75-82d4b4e6988c\" (UID: \"f0c34b4f-c8ab-4940-9c75-82d4b4e6988c\") " Oct 11 05:07:35 crc kubenswrapper[4651]: I1011 05:07:35.543270 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0c34b4f-c8ab-4940-9c75-82d4b4e6988c-kube-api-access-d56ql" (OuterVolumeSpecName: "kube-api-access-d56ql") pod "f0c34b4f-c8ab-4940-9c75-82d4b4e6988c" (UID: "f0c34b4f-c8ab-4940-9c75-82d4b4e6988c"). InnerVolumeSpecName "kube-api-access-d56ql". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:07:35 crc kubenswrapper[4651]: I1011 05:07:35.576875 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0c34b4f-c8ab-4940-9c75-82d4b4e6988c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f0c34b4f-c8ab-4940-9c75-82d4b4e6988c" (UID: "f0c34b4f-c8ab-4940-9c75-82d4b4e6988c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:07:35 crc kubenswrapper[4651]: I1011 05:07:35.587808 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0c34b4f-c8ab-4940-9c75-82d4b4e6988c-config-data" (OuterVolumeSpecName: "config-data") pod "f0c34b4f-c8ab-4940-9c75-82d4b4e6988c" (UID: "f0c34b4f-c8ab-4940-9c75-82d4b4e6988c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:07:35 crc kubenswrapper[4651]: I1011 05:07:35.639222 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d56ql\" (UniqueName: \"kubernetes.io/projected/f0c34b4f-c8ab-4940-9c75-82d4b4e6988c-kube-api-access-d56ql\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:35 crc kubenswrapper[4651]: I1011 05:07:35.639257 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0c34b4f-c8ab-4940-9c75-82d4b4e6988c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:35 crc kubenswrapper[4651]: I1011 05:07:35.639267 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0c34b4f-c8ab-4940-9c75-82d4b4e6988c-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.148516 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-ktc4s" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.148511 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-ktc4s" event={"ID":"f0c34b4f-c8ab-4940-9c75-82d4b4e6988c","Type":"ContainerDied","Data":"92573340dadd8834c8e8038c0c67b21c159ad2a5598c699e74f7dfe2e949f87c"} Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.149481 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="92573340dadd8834c8e8038c0c67b21c159ad2a5598c699e74f7dfe2e949f87c" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.438492 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-895cf5cf-n27b7"] Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.460259 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-kb7c9"] Oct 11 05:07:36 crc kubenswrapper[4651]: E1011 05:07:36.460601 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a197d94-8ca4-4992-9bb9-f6c6d42a1351" containerName="mariadb-account-create" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.460625 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a197d94-8ca4-4992-9bb9-f6c6d42a1351" containerName="mariadb-account-create" Oct 11 05:07:36 crc kubenswrapper[4651]: E1011 05:07:36.460649 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ad2c489-03ce-4435-8b12-4a7f77d12c95" containerName="mariadb-account-create" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.460657 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ad2c489-03ce-4435-8b12-4a7f77d12c95" containerName="mariadb-account-create" Oct 11 05:07:36 crc kubenswrapper[4651]: E1011 05:07:36.460668 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="286d8877-76aa-4258-a93d-da719dda8143" containerName="mariadb-account-create" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.460674 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="286d8877-76aa-4258-a93d-da719dda8143" containerName="mariadb-account-create" Oct 11 05:07:36 crc kubenswrapper[4651]: E1011 05:07:36.460691 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0c34b4f-c8ab-4940-9c75-82d4b4e6988c" containerName="keystone-db-sync" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.460697 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0c34b4f-c8ab-4940-9c75-82d4b4e6988c" containerName="keystone-db-sync" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.460864 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0c34b4f-c8ab-4940-9c75-82d4b4e6988c" containerName="keystone-db-sync" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.460881 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="286d8877-76aa-4258-a93d-da719dda8143" containerName="mariadb-account-create" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.460904 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ad2c489-03ce-4435-8b12-4a7f77d12c95" containerName="mariadb-account-create" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.460913 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a197d94-8ca4-4992-9bb9-f6c6d42a1351" containerName="mariadb-account-create" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.461476 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-kb7c9" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.464962 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.465075 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.466472 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-2zq9v" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.466730 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.483641 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-gdq6v"] Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.485308 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c9c9f998c-gdq6v" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.496902 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-kb7c9"] Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.508034 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-gdq6v"] Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.555265 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/81c9e0ff-d205-4fa2-9606-5ba89c367008-fernet-keys\") pod \"keystone-bootstrap-kb7c9\" (UID: \"81c9e0ff-d205-4fa2-9606-5ba89c367008\") " pod="openstack/keystone-bootstrap-kb7c9" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.555371 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-ovsdbserver-sb\") pod \"dnsmasq-dns-6c9c9f998c-gdq6v\" (UID: \"80d6eea1-c7e1-4f5e-aa4a-ec74723db231\") " pod="openstack/dnsmasq-dns-6c9c9f998c-gdq6v" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.555418 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-dns-svc\") pod \"dnsmasq-dns-6c9c9f998c-gdq6v\" (UID: \"80d6eea1-c7e1-4f5e-aa4a-ec74723db231\") " pod="openstack/dnsmasq-dns-6c9c9f998c-gdq6v" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.555456 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4kg8\" (UniqueName: \"kubernetes.io/projected/81c9e0ff-d205-4fa2-9606-5ba89c367008-kube-api-access-z4kg8\") pod \"keystone-bootstrap-kb7c9\" (UID: \"81c9e0ff-d205-4fa2-9606-5ba89c367008\") " pod="openstack/keystone-bootstrap-kb7c9" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.555484 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-ovsdbserver-nb\") pod \"dnsmasq-dns-6c9c9f998c-gdq6v\" (UID: \"80d6eea1-c7e1-4f5e-aa4a-ec74723db231\") " pod="openstack/dnsmasq-dns-6c9c9f998c-gdq6v" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.555512 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-dns-swift-storage-0\") pod \"dnsmasq-dns-6c9c9f998c-gdq6v\" (UID: \"80d6eea1-c7e1-4f5e-aa4a-ec74723db231\") " pod="openstack/dnsmasq-dns-6c9c9f998c-gdq6v" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.555532 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/81c9e0ff-d205-4fa2-9606-5ba89c367008-credential-keys\") pod \"keystone-bootstrap-kb7c9\" (UID: \"81c9e0ff-d205-4fa2-9606-5ba89c367008\") " pod="openstack/keystone-bootstrap-kb7c9" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.555556 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81c9e0ff-d205-4fa2-9606-5ba89c367008-config-data\") pod \"keystone-bootstrap-kb7c9\" (UID: \"81c9e0ff-d205-4fa2-9606-5ba89c367008\") " pod="openstack/keystone-bootstrap-kb7c9" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.555576 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5v2x2\" (UniqueName: \"kubernetes.io/projected/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-kube-api-access-5v2x2\") pod \"dnsmasq-dns-6c9c9f998c-gdq6v\" (UID: \"80d6eea1-c7e1-4f5e-aa4a-ec74723db231\") " pod="openstack/dnsmasq-dns-6c9c9f998c-gdq6v" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.555596 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81c9e0ff-d205-4fa2-9606-5ba89c367008-scripts\") pod \"keystone-bootstrap-kb7c9\" (UID: \"81c9e0ff-d205-4fa2-9606-5ba89c367008\") " pod="openstack/keystone-bootstrap-kb7c9" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.555614 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-config\") pod \"dnsmasq-dns-6c9c9f998c-gdq6v\" (UID: \"80d6eea1-c7e1-4f5e-aa4a-ec74723db231\") " pod="openstack/dnsmasq-dns-6c9c9f998c-gdq6v" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.555638 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81c9e0ff-d205-4fa2-9606-5ba89c367008-combined-ca-bundle\") pod \"keystone-bootstrap-kb7c9\" (UID: \"81c9e0ff-d205-4fa2-9606-5ba89c367008\") " pod="openstack/keystone-bootstrap-kb7c9" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.658572 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/81c9e0ff-d205-4fa2-9606-5ba89c367008-fernet-keys\") pod \"keystone-bootstrap-kb7c9\" (UID: \"81c9e0ff-d205-4fa2-9606-5ba89c367008\") " pod="openstack/keystone-bootstrap-kb7c9" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.658648 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-ovsdbserver-sb\") pod \"dnsmasq-dns-6c9c9f998c-gdq6v\" (UID: \"80d6eea1-c7e1-4f5e-aa4a-ec74723db231\") " pod="openstack/dnsmasq-dns-6c9c9f998c-gdq6v" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.658675 4651 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-dns-svc\") pod \"dnsmasq-dns-6c9c9f998c-gdq6v\" (UID: \"80d6eea1-c7e1-4f5e-aa4a-ec74723db231\") " pod="openstack/dnsmasq-dns-6c9c9f998c-gdq6v" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.658703 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4kg8\" (UniqueName: \"kubernetes.io/projected/81c9e0ff-d205-4fa2-9606-5ba89c367008-kube-api-access-z4kg8\") pod \"keystone-bootstrap-kb7c9\" (UID: \"81c9e0ff-d205-4fa2-9606-5ba89c367008\") " pod="openstack/keystone-bootstrap-kb7c9" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.658724 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-ovsdbserver-nb\") pod \"dnsmasq-dns-6c9c9f998c-gdq6v\" (UID: \"80d6eea1-c7e1-4f5e-aa4a-ec74723db231\") " pod="openstack/dnsmasq-dns-6c9c9f998c-gdq6v" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.658751 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-dns-swift-storage-0\") pod \"dnsmasq-dns-6c9c9f998c-gdq6v\" (UID: \"80d6eea1-c7e1-4f5e-aa4a-ec74723db231\") " pod="openstack/dnsmasq-dns-6c9c9f998c-gdq6v" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.658773 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/81c9e0ff-d205-4fa2-9606-5ba89c367008-credential-keys\") pod \"keystone-bootstrap-kb7c9\" (UID: \"81c9e0ff-d205-4fa2-9606-5ba89c367008\") " pod="openstack/keystone-bootstrap-kb7c9" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.658792 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81c9e0ff-d205-4fa2-9606-5ba89c367008-config-data\") pod \"keystone-bootstrap-kb7c9\" (UID: \"81c9e0ff-d205-4fa2-9606-5ba89c367008\") " pod="openstack/keystone-bootstrap-kb7c9" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.658807 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5v2x2\" (UniqueName: \"kubernetes.io/projected/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-kube-api-access-5v2x2\") pod \"dnsmasq-dns-6c9c9f998c-gdq6v\" (UID: \"80d6eea1-c7e1-4f5e-aa4a-ec74723db231\") " pod="openstack/dnsmasq-dns-6c9c9f998c-gdq6v" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.658840 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81c9e0ff-d205-4fa2-9606-5ba89c367008-scripts\") pod \"keystone-bootstrap-kb7c9\" (UID: \"81c9e0ff-d205-4fa2-9606-5ba89c367008\") " pod="openstack/keystone-bootstrap-kb7c9" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.658855 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-config\") pod \"dnsmasq-dns-6c9c9f998c-gdq6v\" (UID: \"80d6eea1-c7e1-4f5e-aa4a-ec74723db231\") " pod="openstack/dnsmasq-dns-6c9c9f998c-gdq6v" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.658874 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/81c9e0ff-d205-4fa2-9606-5ba89c367008-combined-ca-bundle\") pod \"keystone-bootstrap-kb7c9\" (UID: \"81c9e0ff-d205-4fa2-9606-5ba89c367008\") " pod="openstack/keystone-bootstrap-kb7c9" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.660733 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-dns-swift-storage-0\") pod \"dnsmasq-dns-6c9c9f998c-gdq6v\" (UID: \"80d6eea1-c7e1-4f5e-aa4a-ec74723db231\") " pod="openstack/dnsmasq-dns-6c9c9f998c-gdq6v" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.661652 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-dns-svc\") pod \"dnsmasq-dns-6c9c9f998c-gdq6v\" (UID: \"80d6eea1-c7e1-4f5e-aa4a-ec74723db231\") " pod="openstack/dnsmasq-dns-6c9c9f998c-gdq6v" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.662214 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-ovsdbserver-nb\") pod \"dnsmasq-dns-6c9c9f998c-gdq6v\" (UID: \"80d6eea1-c7e1-4f5e-aa4a-ec74723db231\") " pod="openstack/dnsmasq-dns-6c9c9f998c-gdq6v" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.662257 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-ovsdbserver-sb\") pod \"dnsmasq-dns-6c9c9f998c-gdq6v\" (UID: \"80d6eea1-c7e1-4f5e-aa4a-ec74723db231\") " pod="openstack/dnsmasq-dns-6c9c9f998c-gdq6v" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.665903 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-config\") pod \"dnsmasq-dns-6c9c9f998c-gdq6v\" (UID: \"80d6eea1-c7e1-4f5e-aa4a-ec74723db231\") " pod="openstack/dnsmasq-dns-6c9c9f998c-gdq6v" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.667692 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81c9e0ff-d205-4fa2-9606-5ba89c367008-combined-ca-bundle\") pod \"keystone-bootstrap-kb7c9\" (UID: \"81c9e0ff-d205-4fa2-9606-5ba89c367008\") " pod="openstack/keystone-bootstrap-kb7c9" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.677584 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/81c9e0ff-d205-4fa2-9606-5ba89c367008-fernet-keys\") pod \"keystone-bootstrap-kb7c9\" (UID: \"81c9e0ff-d205-4fa2-9606-5ba89c367008\") " pod="openstack/keystone-bootstrap-kb7c9" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.678236 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-69899bbb49-h5pnl"] Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.680152 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/81c9e0ff-d205-4fa2-9606-5ba89c367008-credential-keys\") pod \"keystone-bootstrap-kb7c9\" (UID: \"81c9e0ff-d205-4fa2-9606-5ba89c367008\") " pod="openstack/keystone-bootstrap-kb7c9" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.680272 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-69899bbb49-h5pnl" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.682084 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81c9e0ff-d205-4fa2-9606-5ba89c367008-scripts\") pod \"keystone-bootstrap-kb7c9\" (UID: \"81c9e0ff-d205-4fa2-9606-5ba89c367008\") " pod="openstack/keystone-bootstrap-kb7c9" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.685301 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81c9e0ff-d205-4fa2-9606-5ba89c367008-config-data\") pod \"keystone-bootstrap-kb7c9\" (UID: \"81c9e0ff-d205-4fa2-9606-5ba89c367008\") " pod="openstack/keystone-bootstrap-kb7c9" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.691252 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-49889" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.691548 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.691662 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.698209 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.713832 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5v2x2\" (UniqueName: \"kubernetes.io/projected/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-kube-api-access-5v2x2\") pod \"dnsmasq-dns-6c9c9f998c-gdq6v\" (UID: \"80d6eea1-c7e1-4f5e-aa4a-ec74723db231\") " pod="openstack/dnsmasq-dns-6c9c9f998c-gdq6v" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.719020 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-69899bbb49-h5pnl"] Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.727959 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4kg8\" (UniqueName: \"kubernetes.io/projected/81c9e0ff-d205-4fa2-9606-5ba89c367008-kube-api-access-z4kg8\") pod \"keystone-bootstrap-kb7c9\" (UID: \"81c9e0ff-d205-4fa2-9606-5ba89c367008\") " pod="openstack/keystone-bootstrap-kb7c9" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.752273 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.754873 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.759178 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.759496 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.759754 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fda7363a-5096-4fd1-9549-dcd2e5c4f70b-logs\") pod \"horizon-69899bbb49-h5pnl\" (UID: \"fda7363a-5096-4fd1-9549-dcd2e5c4f70b\") " pod="openstack/horizon-69899bbb49-h5pnl" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.759831 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fda7363a-5096-4fd1-9549-dcd2e5c4f70b-config-data\") pod \"horizon-69899bbb49-h5pnl\" (UID: \"fda7363a-5096-4fd1-9549-dcd2e5c4f70b\") " pod="openstack/horizon-69899bbb49-h5pnl" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.759871 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fda7363a-5096-4fd1-9549-dcd2e5c4f70b-horizon-secret-key\") pod \"horizon-69899bbb49-h5pnl\" (UID: \"fda7363a-5096-4fd1-9549-dcd2e5c4f70b\") " pod="openstack/horizon-69899bbb49-h5pnl" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.759898 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fda7363a-5096-4fd1-9549-dcd2e5c4f70b-scripts\") pod \"horizon-69899bbb49-h5pnl\" (UID: \"fda7363a-5096-4fd1-9549-dcd2e5c4f70b\") " pod="openstack/horizon-69899bbb49-h5pnl" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.759917 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cq6qg\" (UniqueName: \"kubernetes.io/projected/fda7363a-5096-4fd1-9549-dcd2e5c4f70b-kube-api-access-cq6qg\") pod \"horizon-69899bbb49-h5pnl\" (UID: \"fda7363a-5096-4fd1-9549-dcd2e5c4f70b\") " pod="openstack/horizon-69899bbb49-h5pnl" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.796403 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-kb7c9" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.804185 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.813400 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c9c9f998c-gdq6v" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.861396 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16b0d24a-e647-4381-9f03-9b48c34ba52f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"16b0d24a-e647-4381-9f03-9b48c34ba52f\") " pod="openstack/ceilometer-0" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.861438 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16b0d24a-e647-4381-9f03-9b48c34ba52f-config-data\") pod \"ceilometer-0\" (UID: \"16b0d24a-e647-4381-9f03-9b48c34ba52f\") " pod="openstack/ceilometer-0" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.861462 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6kxl\" (UniqueName: \"kubernetes.io/projected/16b0d24a-e647-4381-9f03-9b48c34ba52f-kube-api-access-p6kxl\") pod \"ceilometer-0\" (UID: \"16b0d24a-e647-4381-9f03-9b48c34ba52f\") " pod="openstack/ceilometer-0" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.861490 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fda7363a-5096-4fd1-9549-dcd2e5c4f70b-config-data\") pod \"horizon-69899bbb49-h5pnl\" (UID: \"fda7363a-5096-4fd1-9549-dcd2e5c4f70b\") " pod="openstack/horizon-69899bbb49-h5pnl" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.861526 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fda7363a-5096-4fd1-9549-dcd2e5c4f70b-horizon-secret-key\") pod \"horizon-69899bbb49-h5pnl\" (UID: \"fda7363a-5096-4fd1-9549-dcd2e5c4f70b\") " pod="openstack/horizon-69899bbb49-h5pnl" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.861555 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fda7363a-5096-4fd1-9549-dcd2e5c4f70b-scripts\") pod \"horizon-69899bbb49-h5pnl\" (UID: \"fda7363a-5096-4fd1-9549-dcd2e5c4f70b\") " pod="openstack/horizon-69899bbb49-h5pnl" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.861572 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cq6qg\" (UniqueName: \"kubernetes.io/projected/fda7363a-5096-4fd1-9549-dcd2e5c4f70b-kube-api-access-cq6qg\") pod \"horizon-69899bbb49-h5pnl\" (UID: \"fda7363a-5096-4fd1-9549-dcd2e5c4f70b\") " pod="openstack/horizon-69899bbb49-h5pnl" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.861592 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16b0d24a-e647-4381-9f03-9b48c34ba52f-scripts\") pod \"ceilometer-0\" (UID: \"16b0d24a-e647-4381-9f03-9b48c34ba52f\") " pod="openstack/ceilometer-0" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.861637 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/16b0d24a-e647-4381-9f03-9b48c34ba52f-run-httpd\") pod \"ceilometer-0\" (UID: \"16b0d24a-e647-4381-9f03-9b48c34ba52f\") " pod="openstack/ceilometer-0" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.861662 4651 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/16b0d24a-e647-4381-9f03-9b48c34ba52f-log-httpd\") pod \"ceilometer-0\" (UID: \"16b0d24a-e647-4381-9f03-9b48c34ba52f\") " pod="openstack/ceilometer-0" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.861682 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/16b0d24a-e647-4381-9f03-9b48c34ba52f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"16b0d24a-e647-4381-9f03-9b48c34ba52f\") " pod="openstack/ceilometer-0" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.861698 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fda7363a-5096-4fd1-9549-dcd2e5c4f70b-logs\") pod \"horizon-69899bbb49-h5pnl\" (UID: \"fda7363a-5096-4fd1-9549-dcd2e5c4f70b\") " pod="openstack/horizon-69899bbb49-h5pnl" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.862084 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fda7363a-5096-4fd1-9549-dcd2e5c4f70b-logs\") pod \"horizon-69899bbb49-h5pnl\" (UID: \"fda7363a-5096-4fd1-9549-dcd2e5c4f70b\") " pod="openstack/horizon-69899bbb49-h5pnl" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.863098 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fda7363a-5096-4fd1-9549-dcd2e5c4f70b-config-data\") pod \"horizon-69899bbb49-h5pnl\" (UID: \"fda7363a-5096-4fd1-9549-dcd2e5c4f70b\") " pod="openstack/horizon-69899bbb49-h5pnl" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.864989 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fda7363a-5096-4fd1-9549-dcd2e5c4f70b-scripts\") pod \"horizon-69899bbb49-h5pnl\" (UID: \"fda7363a-5096-4fd1-9549-dcd2e5c4f70b\") " pod="openstack/horizon-69899bbb49-h5pnl" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.869289 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fda7363a-5096-4fd1-9549-dcd2e5c4f70b-horizon-secret-key\") pod \"horizon-69899bbb49-h5pnl\" (UID: \"fda7363a-5096-4fd1-9549-dcd2e5c4f70b\") " pod="openstack/horizon-69899bbb49-h5pnl" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.878673 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-gdq6v"] Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.897323 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cq6qg\" (UniqueName: \"kubernetes.io/projected/fda7363a-5096-4fd1-9549-dcd2e5c4f70b-kube-api-access-cq6qg\") pod \"horizon-69899bbb49-h5pnl\" (UID: \"fda7363a-5096-4fd1-9549-dcd2e5c4f70b\") " pod="openstack/horizon-69899bbb49-h5pnl" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.919717 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-ctfxs"] Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.920783 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-ctfxs" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.924787 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-rzsbm" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.924907 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.932326 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.962900 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/16b0d24a-e647-4381-9f03-9b48c34ba52f-run-httpd\") pod \"ceilometer-0\" (UID: \"16b0d24a-e647-4381-9f03-9b48c34ba52f\") " pod="openstack/ceilometer-0" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.963241 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/16b0d24a-e647-4381-9f03-9b48c34ba52f-log-httpd\") pod \"ceilometer-0\" (UID: \"16b0d24a-e647-4381-9f03-9b48c34ba52f\") " pod="openstack/ceilometer-0" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.963376 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96c528a9-d9c6-4eec-b63f-5bba189744ae-combined-ca-bundle\") pod \"placement-db-sync-ctfxs\" (UID: \"96c528a9-d9c6-4eec-b63f-5bba189744ae\") " pod="openstack/placement-db-sync-ctfxs" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.963494 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96c528a9-d9c6-4eec-b63f-5bba189744ae-config-data\") pod \"placement-db-sync-ctfxs\" (UID: \"96c528a9-d9c6-4eec-b63f-5bba189744ae\") " pod="openstack/placement-db-sync-ctfxs" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.963623 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/16b0d24a-e647-4381-9f03-9b48c34ba52f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"16b0d24a-e647-4381-9f03-9b48c34ba52f\") " pod="openstack/ceilometer-0" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.963761 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16b0d24a-e647-4381-9f03-9b48c34ba52f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"16b0d24a-e647-4381-9f03-9b48c34ba52f\") " pod="openstack/ceilometer-0" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.963900 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16b0d24a-e647-4381-9f03-9b48c34ba52f-config-data\") pod \"ceilometer-0\" (UID: \"16b0d24a-e647-4381-9f03-9b48c34ba52f\") " pod="openstack/ceilometer-0" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.964040 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6kxl\" (UniqueName: \"kubernetes.io/projected/16b0d24a-e647-4381-9f03-9b48c34ba52f-kube-api-access-p6kxl\") pod \"ceilometer-0\" (UID: \"16b0d24a-e647-4381-9f03-9b48c34ba52f\") " pod="openstack/ceilometer-0" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 
05:07:36.964173 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqcc7\" (UniqueName: \"kubernetes.io/projected/96c528a9-d9c6-4eec-b63f-5bba189744ae-kube-api-access-kqcc7\") pod \"placement-db-sync-ctfxs\" (UID: \"96c528a9-d9c6-4eec-b63f-5bba189744ae\") " pod="openstack/placement-db-sync-ctfxs" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.964299 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96c528a9-d9c6-4eec-b63f-5bba189744ae-logs\") pod \"placement-db-sync-ctfxs\" (UID: \"96c528a9-d9c6-4eec-b63f-5bba189744ae\") " pod="openstack/placement-db-sync-ctfxs" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.964405 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16b0d24a-e647-4381-9f03-9b48c34ba52f-scripts\") pod \"ceilometer-0\" (UID: \"16b0d24a-e647-4381-9f03-9b48c34ba52f\") " pod="openstack/ceilometer-0" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.964537 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96c528a9-d9c6-4eec-b63f-5bba189744ae-scripts\") pod \"placement-db-sync-ctfxs\" (UID: \"96c528a9-d9c6-4eec-b63f-5bba189744ae\") " pod="openstack/placement-db-sync-ctfxs" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.965077 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/16b0d24a-e647-4381-9f03-9b48c34ba52f-run-httpd\") pod \"ceilometer-0\" (UID: \"16b0d24a-e647-4381-9f03-9b48c34ba52f\") " pod="openstack/ceilometer-0" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.966389 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/16b0d24a-e647-4381-9f03-9b48c34ba52f-log-httpd\") pod \"ceilometer-0\" (UID: \"16b0d24a-e647-4381-9f03-9b48c34ba52f\") " pod="openstack/ceilometer-0" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.971888 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16b0d24a-e647-4381-9f03-9b48c34ba52f-scripts\") pod \"ceilometer-0\" (UID: \"16b0d24a-e647-4381-9f03-9b48c34ba52f\") " pod="openstack/ceilometer-0" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.972198 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16b0d24a-e647-4381-9f03-9b48c34ba52f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"16b0d24a-e647-4381-9f03-9b48c34ba52f\") " pod="openstack/ceilometer-0" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.972544 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-ctfxs"] Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.975253 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/16b0d24a-e647-4381-9f03-9b48c34ba52f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"16b0d24a-e647-4381-9f03-9b48c34ba52f\") " pod="openstack/ceilometer-0" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.977100 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/16b0d24a-e647-4381-9f03-9b48c34ba52f-config-data\") pod \"ceilometer-0\" (UID: \"16b0d24a-e647-4381-9f03-9b48c34ba52f\") " pod="openstack/ceilometer-0" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.989485 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-t96wz"] Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.991659 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-t96wz" Oct 11 05:07:36 crc kubenswrapper[4651]: I1011 05:07:36.996854 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6kxl\" (UniqueName: \"kubernetes.io/projected/16b0d24a-e647-4381-9f03-9b48c34ba52f-kube-api-access-p6kxl\") pod \"ceilometer-0\" (UID: \"16b0d24a-e647-4381-9f03-9b48c34ba52f\") " pod="openstack/ceilometer-0" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.013939 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-8rxgw"] Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.015268 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-8rxgw" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.021244 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.021364 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-wzgq2" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.021603 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.041759 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-t96wz"] Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.055076 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-8rxgw"] Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.065887 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96c528a9-d9c6-4eec-b63f-5bba189744ae-scripts\") pod \"placement-db-sync-ctfxs\" (UID: \"96c528a9-d9c6-4eec-b63f-5bba189744ae\") " pod="openstack/placement-db-sync-ctfxs" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.065959 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/90edbdf3-b435-459f-9a74-3f9ea9ace40f-dns-swift-storage-0\") pod \"dnsmasq-dns-57c957c4ff-t96wz\" (UID: \"90edbdf3-b435-459f-9a74-3f9ea9ace40f\") " pod="openstack/dnsmasq-dns-57c957c4ff-t96wz" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.065999 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/36b45d75-4e52-49b7-b7d7-13d53d2f7076-db-sync-config-data\") pod \"cinder-db-sync-8rxgw\" (UID: \"36b45d75-4e52-49b7-b7d7-13d53d2f7076\") " pod="openstack/cinder-db-sync-8rxgw" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.066016 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rp2lz\" (UniqueName: \"kubernetes.io/projected/90edbdf3-b435-459f-9a74-3f9ea9ace40f-kube-api-access-rp2lz\") pod \"dnsmasq-dns-57c957c4ff-t96wz\" (UID: 
\"90edbdf3-b435-459f-9a74-3f9ea9ace40f\") " pod="openstack/dnsmasq-dns-57c957c4ff-t96wz" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.066048 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96c528a9-d9c6-4eec-b63f-5bba189744ae-combined-ca-bundle\") pod \"placement-db-sync-ctfxs\" (UID: \"96c528a9-d9c6-4eec-b63f-5bba189744ae\") " pod="openstack/placement-db-sync-ctfxs" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.066080 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96c528a9-d9c6-4eec-b63f-5bba189744ae-config-data\") pod \"placement-db-sync-ctfxs\" (UID: \"96c528a9-d9c6-4eec-b63f-5bba189744ae\") " pod="openstack/placement-db-sync-ctfxs" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.066114 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36b45d75-4e52-49b7-b7d7-13d53d2f7076-config-data\") pod \"cinder-db-sync-8rxgw\" (UID: \"36b45d75-4e52-49b7-b7d7-13d53d2f7076\") " pod="openstack/cinder-db-sync-8rxgw" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.066160 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36b45d75-4e52-49b7-b7d7-13d53d2f7076-scripts\") pod \"cinder-db-sync-8rxgw\" (UID: \"36b45d75-4e52-49b7-b7d7-13d53d2f7076\") " pod="openstack/cinder-db-sync-8rxgw" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.066174 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90edbdf3-b435-459f-9a74-3f9ea9ace40f-config\") pod \"dnsmasq-dns-57c957c4ff-t96wz\" (UID: \"90edbdf3-b435-459f-9a74-3f9ea9ace40f\") " pod="openstack/dnsmasq-dns-57c957c4ff-t96wz" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.066227 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36b45d75-4e52-49b7-b7d7-13d53d2f7076-combined-ca-bundle\") pod \"cinder-db-sync-8rxgw\" (UID: \"36b45d75-4e52-49b7-b7d7-13d53d2f7076\") " pod="openstack/cinder-db-sync-8rxgw" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.066251 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqcc7\" (UniqueName: \"kubernetes.io/projected/96c528a9-d9c6-4eec-b63f-5bba189744ae-kube-api-access-kqcc7\") pod \"placement-db-sync-ctfxs\" (UID: \"96c528a9-d9c6-4eec-b63f-5bba189744ae\") " pod="openstack/placement-db-sync-ctfxs" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.066276 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/36b45d75-4e52-49b7-b7d7-13d53d2f7076-etc-machine-id\") pod \"cinder-db-sync-8rxgw\" (UID: \"36b45d75-4e52-49b7-b7d7-13d53d2f7076\") " pod="openstack/cinder-db-sync-8rxgw" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.066321 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96c528a9-d9c6-4eec-b63f-5bba189744ae-logs\") pod \"placement-db-sync-ctfxs\" (UID: \"96c528a9-d9c6-4eec-b63f-5bba189744ae\") " pod="openstack/placement-db-sync-ctfxs" Oct 11 05:07:37 crc 
kubenswrapper[4651]: I1011 05:07:37.066347 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/90edbdf3-b435-459f-9a74-3f9ea9ace40f-dns-svc\") pod \"dnsmasq-dns-57c957c4ff-t96wz\" (UID: \"90edbdf3-b435-459f-9a74-3f9ea9ace40f\") " pod="openstack/dnsmasq-dns-57c957c4ff-t96wz" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.066383 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/90edbdf3-b435-459f-9a74-3f9ea9ace40f-ovsdbserver-sb\") pod \"dnsmasq-dns-57c957c4ff-t96wz\" (UID: \"90edbdf3-b435-459f-9a74-3f9ea9ace40f\") " pod="openstack/dnsmasq-dns-57c957c4ff-t96wz" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.066401 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/90edbdf3-b435-459f-9a74-3f9ea9ace40f-ovsdbserver-nb\") pod \"dnsmasq-dns-57c957c4ff-t96wz\" (UID: \"90edbdf3-b435-459f-9a74-3f9ea9ace40f\") " pod="openstack/dnsmasq-dns-57c957c4ff-t96wz" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.066416 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbjpx\" (UniqueName: \"kubernetes.io/projected/36b45d75-4e52-49b7-b7d7-13d53d2f7076-kube-api-access-nbjpx\") pod \"cinder-db-sync-8rxgw\" (UID: \"36b45d75-4e52-49b7-b7d7-13d53d2f7076\") " pod="openstack/cinder-db-sync-8rxgw" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.067887 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-64c56644c5-nxbb9"] Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.069257 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-64c56644c5-nxbb9" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.072295 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96c528a9-d9c6-4eec-b63f-5bba189744ae-logs\") pod \"placement-db-sync-ctfxs\" (UID: \"96c528a9-d9c6-4eec-b63f-5bba189744ae\") " pod="openstack/placement-db-sync-ctfxs" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.078682 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-chq8s"] Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.079674 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-chq8s" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.082476 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96c528a9-d9c6-4eec-b63f-5bba189744ae-combined-ca-bundle\") pod \"placement-db-sync-ctfxs\" (UID: \"96c528a9-d9c6-4eec-b63f-5bba189744ae\") " pod="openstack/placement-db-sync-ctfxs" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.082949 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.083143 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-4bzxx" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.098889 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-69899bbb49-h5pnl" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.106002 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96c528a9-d9c6-4eec-b63f-5bba189744ae-scripts\") pod \"placement-db-sync-ctfxs\" (UID: \"96c528a9-d9c6-4eec-b63f-5bba189744ae\") " pod="openstack/placement-db-sync-ctfxs" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.108707 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96c528a9-d9c6-4eec-b63f-5bba189744ae-config-data\") pod \"placement-db-sync-ctfxs\" (UID: \"96c528a9-d9c6-4eec-b63f-5bba189744ae\") " pod="openstack/placement-db-sync-ctfxs" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.108837 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-64c56644c5-nxbb9"] Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.110478 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqcc7\" (UniqueName: \"kubernetes.io/projected/96c528a9-d9c6-4eec-b63f-5bba189744ae-kube-api-access-kqcc7\") pod \"placement-db-sync-ctfxs\" (UID: \"96c528a9-d9c6-4eec-b63f-5bba189744ae\") " pod="openstack/placement-db-sync-ctfxs" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.126108 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-chq8s"] Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.126485 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.130803 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.132418 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.138341 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.138389 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-9wz7g" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.139129 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.139248 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.168880 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.169755 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36b45d75-4e52-49b7-b7d7-13d53d2f7076-config-data\") pod \"cinder-db-sync-8rxgw\" (UID: \"36b45d75-4e52-49b7-b7d7-13d53d2f7076\") " pod="openstack/cinder-db-sync-8rxgw" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.169794 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcf7n\" (UniqueName: \"kubernetes.io/projected/4f425c3a-376b-4ba1-8066-96b2d1f21698-kube-api-access-tcf7n\") pod \"barbican-db-sync-chq8s\" (UID: \"4f425c3a-376b-4ba1-8066-96b2d1f21698\") " pod="openstack/barbican-db-sync-chq8s" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.169835 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36b45d75-4e52-49b7-b7d7-13d53d2f7076-scripts\") pod \"cinder-db-sync-8rxgw\" (UID: \"36b45d75-4e52-49b7-b7d7-13d53d2f7076\") " pod="openstack/cinder-db-sync-8rxgw" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.169853 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90edbdf3-b435-459f-9a74-3f9ea9ace40f-config\") pod \"dnsmasq-dns-57c957c4ff-t96wz\" (UID: \"90edbdf3-b435-459f-9a74-3f9ea9ace40f\") " pod="openstack/dnsmasq-dns-57c957c4ff-t96wz" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.169921 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36b45d75-4e52-49b7-b7d7-13d53d2f7076-combined-ca-bundle\") pod \"cinder-db-sync-8rxgw\" (UID: \"36b45d75-4e52-49b7-b7d7-13d53d2f7076\") " pod="openstack/cinder-db-sync-8rxgw" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.169953 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/36b45d75-4e52-49b7-b7d7-13d53d2f7076-etc-machine-id\") pod \"cinder-db-sync-8rxgw\" (UID: \"36b45d75-4e52-49b7-b7d7-13d53d2f7076\") " pod="openstack/cinder-db-sync-8rxgw" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.170004 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f425c3a-376b-4ba1-8066-96b2d1f21698-combined-ca-bundle\") pod \"barbican-db-sync-chq8s\" (UID: \"4f425c3a-376b-4ba1-8066-96b2d1f21698\") " 
pod="openstack/barbican-db-sync-chq8s" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.170033 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/90edbdf3-b435-459f-9a74-3f9ea9ace40f-dns-svc\") pod \"dnsmasq-dns-57c957c4ff-t96wz\" (UID: \"90edbdf3-b435-459f-9a74-3f9ea9ace40f\") " pod="openstack/dnsmasq-dns-57c957c4ff-t96wz" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.170079 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/90edbdf3-b435-459f-9a74-3f9ea9ace40f-ovsdbserver-sb\") pod \"dnsmasq-dns-57c957c4ff-t96wz\" (UID: \"90edbdf3-b435-459f-9a74-3f9ea9ace40f\") " pod="openstack/dnsmasq-dns-57c957c4ff-t96wz" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.170107 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4f425c3a-376b-4ba1-8066-96b2d1f21698-db-sync-config-data\") pod \"barbican-db-sync-chq8s\" (UID: \"4f425c3a-376b-4ba1-8066-96b2d1f21698\") " pod="openstack/barbican-db-sync-chq8s" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.170144 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/90edbdf3-b435-459f-9a74-3f9ea9ace40f-ovsdbserver-nb\") pod \"dnsmasq-dns-57c957c4ff-t96wz\" (UID: \"90edbdf3-b435-459f-9a74-3f9ea9ace40f\") " pod="openstack/dnsmasq-dns-57c957c4ff-t96wz" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.170162 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbjpx\" (UniqueName: \"kubernetes.io/projected/36b45d75-4e52-49b7-b7d7-13d53d2f7076-kube-api-access-nbjpx\") pod \"cinder-db-sync-8rxgw\" (UID: \"36b45d75-4e52-49b7-b7d7-13d53d2f7076\") " pod="openstack/cinder-db-sync-8rxgw" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.170219 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/90edbdf3-b435-459f-9a74-3f9ea9ace40f-dns-swift-storage-0\") pod \"dnsmasq-dns-57c957c4ff-t96wz\" (UID: \"90edbdf3-b435-459f-9a74-3f9ea9ace40f\") " pod="openstack/dnsmasq-dns-57c957c4ff-t96wz" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.170256 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/36b45d75-4e52-49b7-b7d7-13d53d2f7076-db-sync-config-data\") pod \"cinder-db-sync-8rxgw\" (UID: \"36b45d75-4e52-49b7-b7d7-13d53d2f7076\") " pod="openstack/cinder-db-sync-8rxgw" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.170274 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rp2lz\" (UniqueName: \"kubernetes.io/projected/90edbdf3-b435-459f-9a74-3f9ea9ace40f-kube-api-access-rp2lz\") pod \"dnsmasq-dns-57c957c4ff-t96wz\" (UID: \"90edbdf3-b435-459f-9a74-3f9ea9ace40f\") " pod="openstack/dnsmasq-dns-57c957c4ff-t96wz" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.171764 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/36b45d75-4e52-49b7-b7d7-13d53d2f7076-etc-machine-id\") pod \"cinder-db-sync-8rxgw\" (UID: \"36b45d75-4e52-49b7-b7d7-13d53d2f7076\") " pod="openstack/cinder-db-sync-8rxgw" Oct 11 05:07:37 crc 
kubenswrapper[4651]: I1011 05:07:37.172690 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/90edbdf3-b435-459f-9a74-3f9ea9ace40f-dns-svc\") pod \"dnsmasq-dns-57c957c4ff-t96wz\" (UID: \"90edbdf3-b435-459f-9a74-3f9ea9ace40f\") " pod="openstack/dnsmasq-dns-57c957c4ff-t96wz" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.173398 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/90edbdf3-b435-459f-9a74-3f9ea9ace40f-ovsdbserver-sb\") pod \"dnsmasq-dns-57c957c4ff-t96wz\" (UID: \"90edbdf3-b435-459f-9a74-3f9ea9ace40f\") " pod="openstack/dnsmasq-dns-57c957c4ff-t96wz" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.173596 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90edbdf3-b435-459f-9a74-3f9ea9ace40f-config\") pod \"dnsmasq-dns-57c957c4ff-t96wz\" (UID: \"90edbdf3-b435-459f-9a74-3f9ea9ace40f\") " pod="openstack/dnsmasq-dns-57c957c4ff-t96wz" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.175084 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36b45d75-4e52-49b7-b7d7-13d53d2f7076-config-data\") pod \"cinder-db-sync-8rxgw\" (UID: \"36b45d75-4e52-49b7-b7d7-13d53d2f7076\") " pod="openstack/cinder-db-sync-8rxgw" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.176257 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/90edbdf3-b435-459f-9a74-3f9ea9ace40f-dns-swift-storage-0\") pod \"dnsmasq-dns-57c957c4ff-t96wz\" (UID: \"90edbdf3-b435-459f-9a74-3f9ea9ace40f\") " pod="openstack/dnsmasq-dns-57c957c4ff-t96wz" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.176573 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/36b45d75-4e52-49b7-b7d7-13d53d2f7076-db-sync-config-data\") pod \"cinder-db-sync-8rxgw\" (UID: \"36b45d75-4e52-49b7-b7d7-13d53d2f7076\") " pod="openstack/cinder-db-sync-8rxgw" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.176942 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-895cf5cf-n27b7" podUID="d6e586e3-8d32-4116-a47d-fd410e4c2c2d" containerName="dnsmasq-dns" containerID="cri-o://d018b4446595cff8a0ec737a2c8414a79b19c0db7a4196134a65a47cb59b50b0" gracePeriod=10 Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.177114 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/90edbdf3-b435-459f-9a74-3f9ea9ace40f-ovsdbserver-nb\") pod \"dnsmasq-dns-57c957c4ff-t96wz\" (UID: \"90edbdf3-b435-459f-9a74-3f9ea9ace40f\") " pod="openstack/dnsmasq-dns-57c957c4ff-t96wz" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.178236 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36b45d75-4e52-49b7-b7d7-13d53d2f7076-scripts\") pod \"cinder-db-sync-8rxgw\" (UID: \"36b45d75-4e52-49b7-b7d7-13d53d2f7076\") " pod="openstack/cinder-db-sync-8rxgw" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.189068 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36b45d75-4e52-49b7-b7d7-13d53d2f7076-combined-ca-bundle\") 
pod \"cinder-db-sync-8rxgw\" (UID: \"36b45d75-4e52-49b7-b7d7-13d53d2f7076\") " pod="openstack/cinder-db-sync-8rxgw" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.200143 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nbjpx\" (UniqueName: \"kubernetes.io/projected/36b45d75-4e52-49b7-b7d7-13d53d2f7076-kube-api-access-nbjpx\") pod \"cinder-db-sync-8rxgw\" (UID: \"36b45d75-4e52-49b7-b7d7-13d53d2f7076\") " pod="openstack/cinder-db-sync-8rxgw" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.200559 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rp2lz\" (UniqueName: \"kubernetes.io/projected/90edbdf3-b435-459f-9a74-3f9ea9ace40f-kube-api-access-rp2lz\") pod \"dnsmasq-dns-57c957c4ff-t96wz\" (UID: \"90edbdf3-b435-459f-9a74-3f9ea9ace40f\") " pod="openstack/dnsmasq-dns-57c957c4ff-t96wz" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.223119 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-zvfkb"] Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.224529 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-zvfkb" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.228588 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.228848 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.229028 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-kqtgr" Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.254794 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-zvfkb"] Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.261970 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-ctfxs"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.272431 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a85fe60-6391-47c1-8f31-803a742a188a-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"6a85fe60-6391-47c1-8f31-803a742a188a\") " pod="openstack/glance-default-external-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.272476 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/786ef132-9cae-4be8-9ffa-7dc9bcd86a5a-scripts\") pod \"horizon-64c56644c5-nxbb9\" (UID: \"786ef132-9cae-4be8-9ffa-7dc9bcd86a5a\") " pod="openstack/horizon-64c56644c5-nxbb9"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.272507 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a85fe60-6391-47c1-8f31-803a742a188a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"6a85fe60-6391-47c1-8f31-803a742a188a\") " pod="openstack/glance-default-external-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.272546 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/786ef132-9cae-4be8-9ffa-7dc9bcd86a5a-logs\") pod \"horizon-64c56644c5-nxbb9\" (UID: \"786ef132-9cae-4be8-9ffa-7dc9bcd86a5a\") " pod="openstack/horizon-64c56644c5-nxbb9"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.272572 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"6a85fe60-6391-47c1-8f31-803a742a188a\") " pod="openstack/glance-default-external-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.272607 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f425c3a-376b-4ba1-8066-96b2d1f21698-combined-ca-bundle\") pod \"barbican-db-sync-chq8s\" (UID: \"4f425c3a-376b-4ba1-8066-96b2d1f21698\") " pod="openstack/barbican-db-sync-chq8s"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.272642 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbjq6\" (UniqueName: \"kubernetes.io/projected/6a85fe60-6391-47c1-8f31-803a742a188a-kube-api-access-rbjq6\") pod \"glance-default-external-api-0\" (UID: \"6a85fe60-6391-47c1-8f31-803a742a188a\") " pod="openstack/glance-default-external-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.272679 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4f425c3a-376b-4ba1-8066-96b2d1f21698-db-sync-config-data\") pod \"barbican-db-sync-chq8s\" (UID: \"4f425c3a-376b-4ba1-8066-96b2d1f21698\") " pod="openstack/barbican-db-sync-chq8s"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.272699 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a85fe60-6391-47c1-8f31-803a742a188a-config-data\") pod \"glance-default-external-api-0\" (UID: \"6a85fe60-6391-47c1-8f31-803a742a188a\") " pod="openstack/glance-default-external-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.272748 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/786ef132-9cae-4be8-9ffa-7dc9bcd86a5a-config-data\") pod \"horizon-64c56644c5-nxbb9\" (UID: \"786ef132-9cae-4be8-9ffa-7dc9bcd86a5a\") " pod="openstack/horizon-64c56644c5-nxbb9"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.272776 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/786ef132-9cae-4be8-9ffa-7dc9bcd86a5a-horizon-secret-key\") pod \"horizon-64c56644c5-nxbb9\" (UID: \"786ef132-9cae-4be8-9ffa-7dc9bcd86a5a\") " pod="openstack/horizon-64c56644c5-nxbb9"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.272855 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcf7n\" (UniqueName: \"kubernetes.io/projected/4f425c3a-376b-4ba1-8066-96b2d1f21698-kube-api-access-tcf7n\") pod \"barbican-db-sync-chq8s\" (UID: \"4f425c3a-376b-4ba1-8066-96b2d1f21698\") " pod="openstack/barbican-db-sync-chq8s"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.272880 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6a85fe60-6391-47c1-8f31-803a742a188a-scripts\") pod \"glance-default-external-api-0\" (UID: \"6a85fe60-6391-47c1-8f31-803a742a188a\") " pod="openstack/glance-default-external-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.272917 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6a85fe60-6391-47c1-8f31-803a742a188a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"6a85fe60-6391-47c1-8f31-803a742a188a\") " pod="openstack/glance-default-external-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.272943 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a85fe60-6391-47c1-8f31-803a742a188a-logs\") pod \"glance-default-external-api-0\" (UID: \"6a85fe60-6391-47c1-8f31-803a742a188a\") " pod="openstack/glance-default-external-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.272976 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4szm\" (UniqueName: \"kubernetes.io/projected/786ef132-9cae-4be8-9ffa-7dc9bcd86a5a-kube-api-access-c4szm\") pod \"horizon-64c56644c5-nxbb9\" (UID: \"786ef132-9cae-4be8-9ffa-7dc9bcd86a5a\") " pod="openstack/horizon-64c56644c5-nxbb9"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.277430 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f425c3a-376b-4ba1-8066-96b2d1f21698-combined-ca-bundle\") pod \"barbican-db-sync-chq8s\" (UID: \"4f425c3a-376b-4ba1-8066-96b2d1f21698\") " pod="openstack/barbican-db-sync-chq8s"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.277658 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.279086 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.281371 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4f425c3a-376b-4ba1-8066-96b2d1f21698-db-sync-config-data\") pod \"barbican-db-sync-chq8s\" (UID: \"4f425c3a-376b-4ba1-8066-96b2d1f21698\") " pod="openstack/barbican-db-sync-chq8s"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.285382 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.285763 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.290575 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.337244 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-t96wz"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.355017 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcf7n\" (UniqueName: \"kubernetes.io/projected/4f425c3a-376b-4ba1-8066-96b2d1f21698-kube-api-access-tcf7n\") pod \"barbican-db-sync-chq8s\" (UID: \"4f425c3a-376b-4ba1-8066-96b2d1f21698\") " pod="openstack/barbican-db-sync-chq8s"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.375972 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/786ef132-9cae-4be8-9ffa-7dc9bcd86a5a-config-data\") pod \"horizon-64c56644c5-nxbb9\" (UID: \"786ef132-9cae-4be8-9ffa-7dc9bcd86a5a\") " pod="openstack/horizon-64c56644c5-nxbb9"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.376051 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/786ef132-9cae-4be8-9ffa-7dc9bcd86a5a-horizon-secret-key\") pod \"horizon-64c56644c5-nxbb9\" (UID: \"786ef132-9cae-4be8-9ffa-7dc9bcd86a5a\") " pod="openstack/horizon-64c56644c5-nxbb9"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.376105 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b35ecebc-5355-4ad7-bf37-0d288eed4fdc-combined-ca-bundle\") pod \"neutron-db-sync-zvfkb\" (UID: \"b35ecebc-5355-4ad7-bf37-0d288eed4fdc\") " pod="openstack/neutron-db-sync-zvfkb"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.376132 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/42822e28-23dc-4d16-a96b-bdef97cd87b4-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"42822e28-23dc-4d16-a96b-bdef97cd87b4\") " pod="openstack/glance-default-internal-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.376181 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/42822e28-23dc-4d16-a96b-bdef97cd87b4-logs\") pod \"glance-default-internal-api-0\" (UID: \"42822e28-23dc-4d16-a96b-bdef97cd87b4\") " pod="openstack/glance-default-internal-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.376208 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6a85fe60-6391-47c1-8f31-803a742a188a-scripts\") pod \"glance-default-external-api-0\" (UID: \"6a85fe60-6391-47c1-8f31-803a742a188a\") " pod="openstack/glance-default-external-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.376231 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-internal-api-0\" (UID: \"42822e28-23dc-4d16-a96b-bdef97cd87b4\") " pod="openstack/glance-default-internal-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.376288 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6a85fe60-6391-47c1-8f31-803a742a188a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"6a85fe60-6391-47c1-8f31-803a742a188a\") " pod="openstack/glance-default-external-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.376307 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/42822e28-23dc-4d16-a96b-bdef97cd87b4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"42822e28-23dc-4d16-a96b-bdef97cd87b4\") " pod="openstack/glance-default-internal-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.376559 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a85fe60-6391-47c1-8f31-803a742a188a-logs\") pod \"glance-default-external-api-0\" (UID: \"6a85fe60-6391-47c1-8f31-803a742a188a\") " pod="openstack/glance-default-external-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.376610 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4szm\" (UniqueName: \"kubernetes.io/projected/786ef132-9cae-4be8-9ffa-7dc9bcd86a5a-kube-api-access-c4szm\") pod \"horizon-64c56644c5-nxbb9\" (UID: \"786ef132-9cae-4be8-9ffa-7dc9bcd86a5a\") " pod="openstack/horizon-64c56644c5-nxbb9"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.376631 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a85fe60-6391-47c1-8f31-803a742a188a-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"6a85fe60-6391-47c1-8f31-803a742a188a\") " pod="openstack/glance-default-external-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.376675 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/786ef132-9cae-4be8-9ffa-7dc9bcd86a5a-scripts\") pod \"horizon-64c56644c5-nxbb9\" (UID: \"786ef132-9cae-4be8-9ffa-7dc9bcd86a5a\") " pod="openstack/horizon-64c56644c5-nxbb9"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.376691 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a85fe60-6391-47c1-8f31-803a742a188a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"6a85fe60-6391-47c1-8f31-803a742a188a\") " pod="openstack/glance-default-external-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.376706 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kttnp\" (UniqueName: \"kubernetes.io/projected/42822e28-23dc-4d16-a96b-bdef97cd87b4-kube-api-access-kttnp\") pod \"glance-default-internal-api-0\" (UID: \"42822e28-23dc-4d16-a96b-bdef97cd87b4\") " pod="openstack/glance-default-internal-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.376732 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/786ef132-9cae-4be8-9ffa-7dc9bcd86a5a-logs\") pod \"horizon-64c56644c5-nxbb9\" (UID: \"786ef132-9cae-4be8-9ffa-7dc9bcd86a5a\") " pod="openstack/horizon-64c56644c5-nxbb9"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.376768 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/42822e28-23dc-4d16-a96b-bdef97cd87b4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"42822e28-23dc-4d16-a96b-bdef97cd87b4\") " pod="openstack/glance-default-internal-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.376789 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"6a85fe60-6391-47c1-8f31-803a742a188a\") " pod="openstack/glance-default-external-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.376835 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xqqzp\" (UniqueName: \"kubernetes.io/projected/b35ecebc-5355-4ad7-bf37-0d288eed4fdc-kube-api-access-xqqzp\") pod \"neutron-db-sync-zvfkb\" (UID: \"b35ecebc-5355-4ad7-bf37-0d288eed4fdc\") " pod="openstack/neutron-db-sync-zvfkb"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.376855 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42822e28-23dc-4d16-a96b-bdef97cd87b4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"42822e28-23dc-4d16-a96b-bdef97cd87b4\") " pod="openstack/glance-default-internal-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.376872 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbjq6\" (UniqueName: \"kubernetes.io/projected/6a85fe60-6391-47c1-8f31-803a742a188a-kube-api-access-rbjq6\") pod \"glance-default-external-api-0\" (UID: \"6a85fe60-6391-47c1-8f31-803a742a188a\") " pod="openstack/glance-default-external-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.376908 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b35ecebc-5355-4ad7-bf37-0d288eed4fdc-config\") pod \"neutron-db-sync-zvfkb\" (UID: \"b35ecebc-5355-4ad7-bf37-0d288eed4fdc\") " pod="openstack/neutron-db-sync-zvfkb"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.376929 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42822e28-23dc-4d16-a96b-bdef97cd87b4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"42822e28-23dc-4d16-a96b-bdef97cd87b4\") " pod="openstack/glance-default-internal-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.376961 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a85fe60-6391-47c1-8f31-803a742a188a-config-data\") pod \"glance-default-external-api-0\" (UID: \"6a85fe60-6391-47c1-8f31-803a742a188a\") " pod="openstack/glance-default-external-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.378854 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/786ef132-9cae-4be8-9ffa-7dc9bcd86a5a-scripts\") pod \"horizon-64c56644c5-nxbb9\" (UID: \"786ef132-9cae-4be8-9ffa-7dc9bcd86a5a\") " pod="openstack/horizon-64c56644c5-nxbb9"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.380193 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/786ef132-9cae-4be8-9ffa-7dc9bcd86a5a-logs\") pod \"horizon-64c56644c5-nxbb9\" (UID: \"786ef132-9cae-4be8-9ffa-7dc9bcd86a5a\") " pod="openstack/horizon-64c56644c5-nxbb9"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.380138 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/786ef132-9cae-4be8-9ffa-7dc9bcd86a5a-config-data\") pod \"horizon-64c56644c5-nxbb9\" (UID: \"786ef132-9cae-4be8-9ffa-7dc9bcd86a5a\") " pod="openstack/horizon-64c56644c5-nxbb9"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.381010 4651 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"6a85fe60-6391-47c1-8f31-803a742a188a\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.381441 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a85fe60-6391-47c1-8f31-803a742a188a-logs\") pod \"glance-default-external-api-0\" (UID: \"6a85fe60-6391-47c1-8f31-803a742a188a\") " pod="openstack/glance-default-external-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.381762 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6a85fe60-6391-47c1-8f31-803a742a188a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"6a85fe60-6391-47c1-8f31-803a742a188a\") " pod="openstack/glance-default-external-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.381778 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a85fe60-6391-47c1-8f31-803a742a188a-config-data\") pod \"glance-default-external-api-0\" (UID: \"6a85fe60-6391-47c1-8f31-803a742a188a\") " pod="openstack/glance-default-external-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.383802 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a85fe60-6391-47c1-8f31-803a742a188a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"6a85fe60-6391-47c1-8f31-803a742a188a\") " pod="openstack/glance-default-external-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.387186 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/786ef132-9cae-4be8-9ffa-7dc9bcd86a5a-horizon-secret-key\") pod \"horizon-64c56644c5-nxbb9\" (UID: \"786ef132-9cae-4be8-9ffa-7dc9bcd86a5a\") " pod="openstack/horizon-64c56644c5-nxbb9"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.390394 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6a85fe60-6391-47c1-8f31-803a742a188a-scripts\") pod \"glance-default-external-api-0\" (UID: \"6a85fe60-6391-47c1-8f31-803a742a188a\") " pod="openstack/glance-default-external-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.393646 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a85fe60-6391-47c1-8f31-803a742a188a-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"6a85fe60-6391-47c1-8f31-803a742a188a\") " pod="openstack/glance-default-external-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.399694 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbjq6\" (UniqueName: \"kubernetes.io/projected/6a85fe60-6391-47c1-8f31-803a742a188a-kube-api-access-rbjq6\") pod \"glance-default-external-api-0\" (UID: \"6a85fe60-6391-47c1-8f31-803a742a188a\") " pod="openstack/glance-default-external-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.400314 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4szm\" (UniqueName: \"kubernetes.io/projected/786ef132-9cae-4be8-9ffa-7dc9bcd86a5a-kube-api-access-c4szm\") pod \"horizon-64c56644c5-nxbb9\" (UID: \"786ef132-9cae-4be8-9ffa-7dc9bcd86a5a\") " pod="openstack/horizon-64c56644c5-nxbb9"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.417433 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-8rxgw"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.464590 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-kb7c9"]
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.472328 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-64c56644c5-nxbb9"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.478492 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b35ecebc-5355-4ad7-bf37-0d288eed4fdc-combined-ca-bundle\") pod \"neutron-db-sync-zvfkb\" (UID: \"b35ecebc-5355-4ad7-bf37-0d288eed4fdc\") " pod="openstack/neutron-db-sync-zvfkb"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.478536 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/42822e28-23dc-4d16-a96b-bdef97cd87b4-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"42822e28-23dc-4d16-a96b-bdef97cd87b4\") " pod="openstack/glance-default-internal-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.478562 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/42822e28-23dc-4d16-a96b-bdef97cd87b4-logs\") pod \"glance-default-internal-api-0\" (UID: \"42822e28-23dc-4d16-a96b-bdef97cd87b4\") " pod="openstack/glance-default-internal-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.478592 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-internal-api-0\" (UID: \"42822e28-23dc-4d16-a96b-bdef97cd87b4\") " pod="openstack/glance-default-internal-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.478616 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/42822e28-23dc-4d16-a96b-bdef97cd87b4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"42822e28-23dc-4d16-a96b-bdef97cd87b4\") " pod="openstack/glance-default-internal-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.478647 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kttnp\" (UniqueName: \"kubernetes.io/projected/42822e28-23dc-4d16-a96b-bdef97cd87b4-kube-api-access-kttnp\") pod \"glance-default-internal-api-0\" (UID: \"42822e28-23dc-4d16-a96b-bdef97cd87b4\") " pod="openstack/glance-default-internal-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.478676 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/42822e28-23dc-4d16-a96b-bdef97cd87b4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"42822e28-23dc-4d16-a96b-bdef97cd87b4\") " pod="openstack/glance-default-internal-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.478707 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xqqzp\" (UniqueName: \"kubernetes.io/projected/b35ecebc-5355-4ad7-bf37-0d288eed4fdc-kube-api-access-xqqzp\") pod \"neutron-db-sync-zvfkb\" (UID: \"b35ecebc-5355-4ad7-bf37-0d288eed4fdc\") " pod="openstack/neutron-db-sync-zvfkb"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.478724 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42822e28-23dc-4d16-a96b-bdef97cd87b4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"42822e28-23dc-4d16-a96b-bdef97cd87b4\") " pod="openstack/glance-default-internal-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.478741 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b35ecebc-5355-4ad7-bf37-0d288eed4fdc-config\") pod \"neutron-db-sync-zvfkb\" (UID: \"b35ecebc-5355-4ad7-bf37-0d288eed4fdc\") " pod="openstack/neutron-db-sync-zvfkb"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.478759 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42822e28-23dc-4d16-a96b-bdef97cd87b4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"42822e28-23dc-4d16-a96b-bdef97cd87b4\") " pod="openstack/glance-default-internal-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.481522 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/42822e28-23dc-4d16-a96b-bdef97cd87b4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"42822e28-23dc-4d16-a96b-bdef97cd87b4\") " pod="openstack/glance-default-internal-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.481930 4651 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-internal-api-0\" (UID: \"42822e28-23dc-4d16-a96b-bdef97cd87b4\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/glance-default-internal-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.482258 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/42822e28-23dc-4d16-a96b-bdef97cd87b4-logs\") pod \"glance-default-internal-api-0\" (UID: \"42822e28-23dc-4d16-a96b-bdef97cd87b4\") " pod="openstack/glance-default-internal-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.494786 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42822e28-23dc-4d16-a96b-bdef97cd87b4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"42822e28-23dc-4d16-a96b-bdef97cd87b4\") " pod="openstack/glance-default-internal-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.495037 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42822e28-23dc-4d16-a96b-bdef97cd87b4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"42822e28-23dc-4d16-a96b-bdef97cd87b4\") " pod="openstack/glance-default-internal-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.495406 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/42822e28-23dc-4d16-a96b-bdef97cd87b4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"42822e28-23dc-4d16-a96b-bdef97cd87b4\") " pod="openstack/glance-default-internal-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.497744 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/b35ecebc-5355-4ad7-bf37-0d288eed4fdc-config\") pod \"neutron-db-sync-zvfkb\" (UID: \"b35ecebc-5355-4ad7-bf37-0d288eed4fdc\") " pod="openstack/neutron-db-sync-zvfkb"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.499183 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xqqzp\" (UniqueName: \"kubernetes.io/projected/b35ecebc-5355-4ad7-bf37-0d288eed4fdc-kube-api-access-xqqzp\") pod \"neutron-db-sync-zvfkb\" (UID: \"b35ecebc-5355-4ad7-bf37-0d288eed4fdc\") " pod="openstack/neutron-db-sync-zvfkb"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.499433 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"6a85fe60-6391-47c1-8f31-803a742a188a\") " pod="openstack/glance-default-external-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.499977 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kttnp\" (UniqueName: \"kubernetes.io/projected/42822e28-23dc-4d16-a96b-bdef97cd87b4-kube-api-access-kttnp\") pod \"glance-default-internal-api-0\" (UID: \"42822e28-23dc-4d16-a96b-bdef97cd87b4\") " pod="openstack/glance-default-internal-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.500659 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/42822e28-23dc-4d16-a96b-bdef97cd87b4-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"42822e28-23dc-4d16-a96b-bdef97cd87b4\") " pod="openstack/glance-default-internal-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.500886 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-chq8s"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.501947 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b35ecebc-5355-4ad7-bf37-0d288eed4fdc-combined-ca-bundle\") pod \"neutron-db-sync-zvfkb\" (UID: \"b35ecebc-5355-4ad7-bf37-0d288eed4fdc\") " pod="openstack/neutron-db-sync-zvfkb"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.511381 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.530992 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-internal-api-0\" (UID: \"42822e28-23dc-4d16-a96b-bdef97cd87b4\") " pod="openstack/glance-default-internal-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.634610 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-zvfkb"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.648222 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-gdq6v"]
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.673731 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.790872 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 11 05:07:37 crc kubenswrapper[4651]: I1011 05:07:37.981096 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-ctfxs"]
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.015540 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-69899bbb49-h5pnl"]
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.102018 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-895cf5cf-n27b7"
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.123642 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-t96wz"]
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.177351 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-8rxgw"]
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.196567 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-ovsdbserver-nb\") pod \"d6e586e3-8d32-4116-a47d-fd410e4c2c2d\" (UID: \"d6e586e3-8d32-4116-a47d-fd410e4c2c2d\") "
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.196732 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-ovsdbserver-sb\") pod \"d6e586e3-8d32-4116-a47d-fd410e4c2c2d\" (UID: \"d6e586e3-8d32-4116-a47d-fd410e4c2c2d\") "
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.196810 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-dns-swift-storage-0\") pod \"d6e586e3-8d32-4116-a47d-fd410e4c2c2d\" (UID: \"d6e586e3-8d32-4116-a47d-fd410e4c2c2d\") "
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.196887 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gvhmz\" (UniqueName: \"kubernetes.io/projected/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-kube-api-access-gvhmz\") pod \"d6e586e3-8d32-4116-a47d-fd410e4c2c2d\" (UID: \"d6e586e3-8d32-4116-a47d-fd410e4c2c2d\") "
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.196929 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-config\") pod \"d6e586e3-8d32-4116-a47d-fd410e4c2c2d\" (UID: \"d6e586e3-8d32-4116-a47d-fd410e4c2c2d\") "
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.196961 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-dns-svc\") pod \"d6e586e3-8d32-4116-a47d-fd410e4c2c2d\" (UID: \"d6e586e3-8d32-4116-a47d-fd410e4c2c2d\") "
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.199440 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-ctfxs" event={"ID":"96c528a9-d9c6-4eec-b63f-5bba189744ae","Type":"ContainerStarted","Data":"6a883eefff09ee56dab9b3ee93690f306d8748c8c371d1f0184ee0f0f274f676"}
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.202604 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"16b0d24a-e647-4381-9f03-9b48c34ba52f","Type":"ContainerStarted","Data":"d168b82302ecf2149ab4bf095710ed519c5de5178b38b3f144cac0c3cef2dd1b"}
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.206165 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-69899bbb49-h5pnl" event={"ID":"fda7363a-5096-4fd1-9549-dcd2e5c4f70b","Type":"ContainerStarted","Data":"4b53827ad2a9f8ae815036c1f2636fb74161fdd0b109c2bec44ed8bb88b29c08"}
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.207672 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-kube-api-access-gvhmz" (OuterVolumeSpecName: "kube-api-access-gvhmz") pod "d6e586e3-8d32-4116-a47d-fd410e4c2c2d" (UID: "d6e586e3-8d32-4116-a47d-fd410e4c2c2d"). InnerVolumeSpecName "kube-api-access-gvhmz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.213133 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-t96wz" event={"ID":"90edbdf3-b435-459f-9a74-3f9ea9ace40f","Type":"ContainerStarted","Data":"3e7e41767dc31430b727bb45ff706b3bafecd52a1e2f81bbb0fa3d4dd063d673"}
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.221627 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c9c9f998c-gdq6v" event={"ID":"80d6eea1-c7e1-4f5e-aa4a-ec74723db231","Type":"ContainerStarted","Data":"032b508cbc9772d8c74c69cbec74395857f112ee8a6a880e143627761cfc6368"}
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.250571 4651 generic.go:334] "Generic (PLEG): container finished" podID="d6e586e3-8d32-4116-a47d-fd410e4c2c2d" containerID="d018b4446595cff8a0ec737a2c8414a79b19c0db7a4196134a65a47cb59b50b0" exitCode=0
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.250717 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-895cf5cf-n27b7"
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.250782 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-895cf5cf-n27b7" event={"ID":"d6e586e3-8d32-4116-a47d-fd410e4c2c2d","Type":"ContainerDied","Data":"d018b4446595cff8a0ec737a2c8414a79b19c0db7a4196134a65a47cb59b50b0"}
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.251084 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-895cf5cf-n27b7" event={"ID":"d6e586e3-8d32-4116-a47d-fd410e4c2c2d","Type":"ContainerDied","Data":"ad064dcd664902900a58fe4898c9ef18cf40b5620e7ebe046021642abea7d426"}
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.251137 4651 scope.go:117] "RemoveContainer" containerID="d018b4446595cff8a0ec737a2c8414a79b19c0db7a4196134a65a47cb59b50b0"
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.259603 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-kb7c9" event={"ID":"81c9e0ff-d205-4fa2-9606-5ba89c367008","Type":"ContainerStarted","Data":"14e8fe56227b1d8e90e55bf72454b7340832262f6b83b37e348ad57caa633447"}
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.260843 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d6e586e3-8d32-4116-a47d-fd410e4c2c2d" (UID: "d6e586e3-8d32-4116-a47d-fd410e4c2c2d"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.272845 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-config" (OuterVolumeSpecName: "config") pod "d6e586e3-8d32-4116-a47d-fd410e4c2c2d" (UID: "d6e586e3-8d32-4116-a47d-fd410e4c2c2d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.273300 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d6e586e3-8d32-4116-a47d-fd410e4c2c2d" (UID: "d6e586e3-8d32-4116-a47d-fd410e4c2c2d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.282599 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-kb7c9" podStartSLOduration=2.282577994 podStartE2EDuration="2.282577994s" podCreationTimestamp="2025-10-11 05:07:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:07:38.276264633 +0000 UTC m=+979.172497419" watchObservedRunningTime="2025-10-11 05:07:38.282577994 +0000 UTC m=+979.178810790"
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.292609 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d6e586e3-8d32-4116-a47d-fd410e4c2c2d" (UID: "d6e586e3-8d32-4116-a47d-fd410e4c2c2d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.298152 4651 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.298184 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gvhmz\" (UniqueName: \"kubernetes.io/projected/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-kube-api-access-gvhmz\") on node \"crc\" DevicePath \"\""
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.298235 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-config\") on node \"crc\" DevicePath \"\""
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.298248 4651 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.298260 4651 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.310730 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d6e586e3-8d32-4116-a47d-fd410e4c2c2d" (UID: "d6e586e3-8d32-4116-a47d-fd410e4c2c2d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.328863 4651 scope.go:117] "RemoveContainer" containerID="12b891089e4c4450c642a0b7730af16d6077ac2bea6a07c21e3047862940ec1d"
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.365531 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-64c56644c5-nxbb9"]
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.399549 4651 scope.go:117] "RemoveContainer" containerID="d018b4446595cff8a0ec737a2c8414a79b19c0db7a4196134a65a47cb59b50b0"
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.400725 4651 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d6e586e3-8d32-4116-a47d-fd410e4c2c2d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Oct 11 05:07:38 crc kubenswrapper[4651]: E1011 05:07:38.402380 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d018b4446595cff8a0ec737a2c8414a79b19c0db7a4196134a65a47cb59b50b0\": container with ID starting with d018b4446595cff8a0ec737a2c8414a79b19c0db7a4196134a65a47cb59b50b0 not found: ID does not exist" containerID="d018b4446595cff8a0ec737a2c8414a79b19c0db7a4196134a65a47cb59b50b0"
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.402428 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d018b4446595cff8a0ec737a2c8414a79b19c0db7a4196134a65a47cb59b50b0"} err="failed to get container status \"d018b4446595cff8a0ec737a2c8414a79b19c0db7a4196134a65a47cb59b50b0\": rpc error: code = NotFound desc = could not find container \"d018b4446595cff8a0ec737a2c8414a79b19c0db7a4196134a65a47cb59b50b0\": container with ID starting with d018b4446595cff8a0ec737a2c8414a79b19c0db7a4196134a65a47cb59b50b0 not found: ID does not exist"
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.402462 4651 scope.go:117] "RemoveContainer" containerID="12b891089e4c4450c642a0b7730af16d6077ac2bea6a07c21e3047862940ec1d"
Oct 11 05:07:38 crc kubenswrapper[4651]: E1011 05:07:38.403319 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"12b891089e4c4450c642a0b7730af16d6077ac2bea6a07c21e3047862940ec1d\": container with ID starting with 12b891089e4c4450c642a0b7730af16d6077ac2bea6a07c21e3047862940ec1d not found: ID does not exist" containerID="12b891089e4c4450c642a0b7730af16d6077ac2bea6a07c21e3047862940ec1d"
Oct 11 05:07:38 crc kubenswrapper[4651]: I1011 05:07:38.403360 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12b891089e4c4450c642a0b7730af16d6077ac2bea6a07c21e3047862940ec1d"} err="failed to get container status \"12b891089e4c4450c642a0b7730af16d6077ac2bea6a07c21e3047862940ec1d\": rpc error: code = NotFound desc = could not find container \"12b891089e4c4450c642a0b7730af16d6077ac2bea6a07c21e3047862940ec1d\": container with ID starting with 12b891089e4c4450c642a0b7730af16d6077ac2bea6a07c21e3047862940ec1d not found: ID does not exist"
Oct 11 05:07:39 crc kubenswrapper[4651]: I1011 05:07:38.459229 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-chq8s"]
Oct 11 05:07:39 crc kubenswrapper[4651]: I1011 05:07:38.508508 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 11 05:07:39 crc kubenswrapper[4651]: W1011 05:07:38.518232 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6a85fe60_6391_47c1_8f31_803a742a188a.slice/crio-8765069af190c8e8ee1ef9958d9cae748d35209a2a19ac8617512c46b82b81d1 WatchSource:0}: Error finding container 8765069af190c8e8ee1ef9958d9cae748d35209a2a19ac8617512c46b82b81d1: Status 404 returned error can't find the container with id 8765069af190c8e8ee1ef9958d9cae748d35209a2a19ac8617512c46b82b81d1
Oct 11 05:07:39 crc kubenswrapper[4651]: I1011 05:07:38.571813 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-zvfkb"]
Oct 11 05:07:39 crc kubenswrapper[4651]: I1011 05:07:38.604665 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-895cf5cf-n27b7"]
Oct 11 05:07:39 crc kubenswrapper[4651]: I1011 05:07:38.610870 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-895cf5cf-n27b7"]
Oct 11 05:07:39 crc kubenswrapper[4651]: I1011 05:07:38.751164 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 11 05:07:39 crc kubenswrapper[4651]: I1011 05:07:39.284631 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-64c56644c5-nxbb9" event={"ID":"786ef132-9cae-4be8-9ffa-7dc9bcd86a5a","Type":"ContainerStarted","Data":"bfe9e49291f3638cd5c724fc86fa068855c5296d243b3727a829624966544861"}
Oct 11 05:07:39 crc kubenswrapper[4651]: I1011 05:07:39.286140 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-8rxgw" event={"ID":"36b45d75-4e52-49b7-b7d7-13d53d2f7076","Type":"ContainerStarted","Data":"4bfa103027f7526766f59ef9bd0531914559ad883df5930f932503999b6165f3"}
Oct 11 05:07:39 crc kubenswrapper[4651]: I1011 05:07:39.309868 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6a85fe60-6391-47c1-8f31-803a742a188a","Type":"ContainerStarted","Data":"364c735ce9a35a6c40d89769547a9abdb6faacdb3527c3a12b8695f37b1b0cda"}
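The teardown above runs UnmountVolume started, then UnmountVolume.TearDown succeeded, then "Volume detached" per volume; the NotFound/"ID does not exist" pair appears to be the benign case where the kubelet asks CRI-O to remove a container that garbage collection already deleted. A sketch, again illustrative only (the script name and default UID are taken from the dnsmasq-dns-895cf5cf-n27b7 pod above; pass another UID as argv[1]), for printing a compact per-pod teardown trace:

#!/usr/bin/env python3
# pod_teardown.py -- illustrative only: print a compact teardown trace for one
# pod UID, using the phase strings that appear verbatim in the log above.
import re
import sys

uid = sys.argv[1] if len(sys.argv) > 1 else "d6e586e3-8d32-4116-a47d-fd410e4c2c2d"
phases = (
    "operationExecutor.UnmountVolume started",
    "UnmountVolume.TearDown succeeded",
    "Volume detached for volume",
    "Cleaned up orphaned pod volumes dir",
)
stamp = re.compile(r"I\d{4} (\d{2}:\d{2}:\d{2}\.\d+)")
for line in sys.stdin:
    if uid not in line:
        continue
    for phase in phases:
        if phase in line:
            m = stamp.search(line)
            # One line per matched event: klog timestamp, then the phase name.
            print(m.group(1) if m else "?", phase)
            break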
Oct 11 05:07:39 crc kubenswrapper[4651]: I1011 05:07:39.309911 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6a85fe60-6391-47c1-8f31-803a742a188a","Type":"ContainerStarted","Data":"8765069af190c8e8ee1ef9958d9cae748d35209a2a19ac8617512c46b82b81d1"}
Oct 11 05:07:39 crc kubenswrapper[4651]: I1011 05:07:39.325284 4651 generic.go:334] "Generic (PLEG): container finished" podID="80d6eea1-c7e1-4f5e-aa4a-ec74723db231" containerID="d557ce77ac7955e1756bad63d3ab4d77ca1809675ca59e700ec15d866f1f2c79" exitCode=0
Oct 11 05:07:39 crc kubenswrapper[4651]: I1011 05:07:39.325789 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c9c9f998c-gdq6v" event={"ID":"80d6eea1-c7e1-4f5e-aa4a-ec74723db231","Type":"ContainerDied","Data":"d557ce77ac7955e1756bad63d3ab4d77ca1809675ca59e700ec15d866f1f2c79"}
Oct 11 05:07:39 crc kubenswrapper[4651]: I1011 05:07:39.349377 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-kb7c9" event={"ID":"81c9e0ff-d205-4fa2-9606-5ba89c367008","Type":"ContainerStarted","Data":"9afbbc7cb4c14d6ce0164bfe604ff8f34b985ca3fbb40debe1e9f79f85e3940b"}
Oct 11 05:07:39 crc kubenswrapper[4651]: I1011 05:07:39.361749 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-chq8s" event={"ID":"4f425c3a-376b-4ba1-8066-96b2d1f21698","Type":"ContainerStarted","Data":"54246d689260572ea569e72fed4d6476787b1afba4cc68eaed8d65e0b0a33441"}
Oct 11 05:07:39 crc kubenswrapper[4651]: I1011 05:07:39.365549 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"42822e28-23dc-4d16-a96b-bdef97cd87b4","Type":"ContainerStarted","Data":"065b75751b1cfac0965af70f6c38aeffd04cc9d151c8e04f8a9147ebb29f6e7b"}
Oct 11 05:07:39 crc kubenswrapper[4651]: I1011 05:07:39.367709 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-zvfkb" event={"ID":"b35ecebc-5355-4ad7-bf37-0d288eed4fdc","Type":"ContainerStarted","Data":"7e749e6c5e39c8b229cde18437b792d4641bdca4e4c93ef3ba37af438e266041"}
Oct 11 05:07:39 crc kubenswrapper[4651]: I1011 05:07:39.367739 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-zvfkb" event={"ID":"b35ecebc-5355-4ad7-bf37-0d288eed4fdc","Type":"ContainerStarted","Data":"c462253d5364923f564211a3559ee702c4cd925584eb7a81b2dc0b09efb1849b"}
Oct 11 05:07:39 crc kubenswrapper[4651]: I1011 05:07:39.371037 4651 generic.go:334] "Generic (PLEG): container finished" podID="90edbdf3-b435-459f-9a74-3f9ea9ace40f" containerID="ab365c1772db9f96df995b71f01fde27590241651930e87f697505025de79fc4" exitCode=0
Oct 11 05:07:39 crc kubenswrapper[4651]: I1011 05:07:39.371086 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-t96wz" event={"ID":"90edbdf3-b435-459f-9a74-3f9ea9ace40f","Type":"ContainerDied","Data":"ab365c1772db9f96df995b71f01fde27590241651930e87f697505025de79fc4"}
Oct 11 05:07:39 crc kubenswrapper[4651]: I1011 05:07:39.402429 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-zvfkb" podStartSLOduration=2.40239722 podStartE2EDuration="2.40239722s" podCreationTimestamp="2025-10-11 05:07:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:07:39.393740549 +0000 UTC m=+980.289973345" watchObservedRunningTime="2025-10-11 05:07:39.40239722 +0000 UTC m=+980.298630016"
Oct 11 05:07:39 crc kubenswrapper[4651]: I1011 05:07:39.671062 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c9c9f998c-gdq6v"
Oct 11 05:07:39 crc kubenswrapper[4651]: I1011 05:07:39.852699 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-dns-swift-storage-0\") pod \"80d6eea1-c7e1-4f5e-aa4a-ec74723db231\" (UID: \"80d6eea1-c7e1-4f5e-aa4a-ec74723db231\") "
Oct 11 05:07:39 crc kubenswrapper[4651]: I1011 05:07:39.853193 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5v2x2\" (UniqueName: \"kubernetes.io/projected/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-kube-api-access-5v2x2\") pod \"80d6eea1-c7e1-4f5e-aa4a-ec74723db231\" (UID: \"80d6eea1-c7e1-4f5e-aa4a-ec74723db231\") "
Oct 11 05:07:39 crc kubenswrapper[4651]: I1011 05:07:39.853261 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-config\") pod \"80d6eea1-c7e1-4f5e-aa4a-ec74723db231\" (UID: \"80d6eea1-c7e1-4f5e-aa4a-ec74723db231\") "
Oct 11 05:07:39 crc kubenswrapper[4651]: I1011 05:07:39.853430 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-ovsdbserver-sb\") pod \"80d6eea1-c7e1-4f5e-aa4a-ec74723db231\" (UID: \"80d6eea1-c7e1-4f5e-aa4a-ec74723db231\") "
Oct 11 05:07:39 crc kubenswrapper[4651]: I1011 05:07:39.853530 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-dns-svc\") pod \"80d6eea1-c7e1-4f5e-aa4a-ec74723db231\" (UID: \"80d6eea1-c7e1-4f5e-aa4a-ec74723db231\") "
Oct 11 05:07:39 crc kubenswrapper[4651]: I1011 05:07:39.853602 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-ovsdbserver-nb\") pod \"80d6eea1-c7e1-4f5e-aa4a-ec74723db231\" (UID: \"80d6eea1-c7e1-4f5e-aa4a-ec74723db231\") "
Oct 11 05:07:39 crc kubenswrapper[4651]: I1011 05:07:39.864040 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-kube-api-access-5v2x2" (OuterVolumeSpecName: "kube-api-access-5v2x2") pod "80d6eea1-c7e1-4f5e-aa4a-ec74723db231" (UID: "80d6eea1-c7e1-4f5e-aa4a-ec74723db231"). InnerVolumeSpecName "kube-api-access-5v2x2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:07:39 crc kubenswrapper[4651]: I1011 05:07:39.884502 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "80d6eea1-c7e1-4f5e-aa4a-ec74723db231" (UID: "80d6eea1-c7e1-4f5e-aa4a-ec74723db231"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:07:39 crc kubenswrapper[4651]: I1011 05:07:39.904346 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "80d6eea1-c7e1-4f5e-aa4a-ec74723db231" (UID: "80d6eea1-c7e1-4f5e-aa4a-ec74723db231"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:07:39 crc kubenswrapper[4651]: I1011 05:07:39.906071 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6e586e3-8d32-4116-a47d-fd410e4c2c2d" path="/var/lib/kubelet/pods/d6e586e3-8d32-4116-a47d-fd410e4c2c2d/volumes"
Oct 11 05:07:39 crc kubenswrapper[4651]: I1011 05:07:39.955787 4651 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 11 05:07:39 crc kubenswrapper[4651]: I1011 05:07:39.955831 4651 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Oct 11 05:07:39 crc kubenswrapper[4651]: I1011 05:07:39.955840 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5v2x2\" (UniqueName: \"kubernetes.io/projected/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-kube-api-access-5v2x2\") on node \"crc\" DevicePath \"\""
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.075009 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "80d6eea1-c7e1-4f5e-aa4a-ec74723db231" (UID: "80d6eea1-c7e1-4f5e-aa4a-ec74723db231"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.127624 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-config" (OuterVolumeSpecName: "config") pod "80d6eea1-c7e1-4f5e-aa4a-ec74723db231" (UID: "80d6eea1-c7e1-4f5e-aa4a-ec74723db231"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.134725 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "80d6eea1-c7e1-4f5e-aa4a-ec74723db231" (UID: "80d6eea1-c7e1-4f5e-aa4a-ec74723db231"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.159252 4651 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.159319 4651 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.159335 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80d6eea1-c7e1-4f5e-aa4a-ec74723db231-config\") on node \"crc\" DevicePath \"\""
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.310123 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.359761 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-64c56644c5-nxbb9"]
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.392543 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-75955bb549-s67lw"]
Oct 11 05:07:40 crc kubenswrapper[4651]: E1011 05:07:40.392988 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80d6eea1-c7e1-4f5e-aa4a-ec74723db231" containerName="init"
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.393006 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="80d6eea1-c7e1-4f5e-aa4a-ec74723db231" containerName="init"
Oct 11 05:07:40 crc kubenswrapper[4651]: E1011 05:07:40.393022 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6e586e3-8d32-4116-a47d-fd410e4c2c2d" containerName="dnsmasq-dns"
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.393028 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6e586e3-8d32-4116-a47d-fd410e4c2c2d" containerName="dnsmasq-dns"
Oct 11 05:07:40 crc kubenswrapper[4651]: E1011 05:07:40.393050 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6e586e3-8d32-4116-a47d-fd410e4c2c2d" containerName="init"
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.393057 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6e586e3-8d32-4116-a47d-fd410e4c2c2d" containerName="init"
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.393236 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="80d6eea1-c7e1-4f5e-aa4a-ec74723db231" containerName="init"
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.393247 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6e586e3-8d32-4116-a47d-fd410e4c2c2d" containerName="dnsmasq-dns"
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.402435 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.402594 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-75955bb549-s67lw"
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.415914 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-75955bb549-s67lw"]
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.433982 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c9c9f998c-gdq6v" event={"ID":"80d6eea1-c7e1-4f5e-aa4a-ec74723db231","Type":"ContainerDied","Data":"032b508cbc9772d8c74c69cbec74395857f112ee8a6a880e143627761cfc6368"}
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.434035 4651 scope.go:117] "RemoveContainer" containerID="d557ce77ac7955e1756bad63d3ab4d77ca1809675ca59e700ec15d866f1f2c79"
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.434158 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c9c9f998c-gdq6v"
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.466081 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="6a85fe60-6391-47c1-8f31-803a742a188a" containerName="glance-log" containerID="cri-o://364c735ce9a35a6c40d89769547a9abdb6faacdb3527c3a12b8695f37b1b0cda" gracePeriod=30
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.466212 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="6a85fe60-6391-47c1-8f31-803a742a188a" containerName="glance-httpd" containerID="cri-o://43370a1e523b69ae25c3f37878bae64097cdb6429a7989d07ab68ed8bfc6f832" gracePeriod=30
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.467378 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.473476 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"42822e28-23dc-4d16-a96b-bdef97cd87b4","Type":"ContainerStarted","Data":"dd08989de1f1be8d0b72125ccc1ec1502812a5ebad8ea6145fc6cf2c3a55f7b8"}
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.509852 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-t96wz" event={"ID":"90edbdf3-b435-459f-9a74-3f9ea9ace40f","Type":"ContainerStarted","Data":"07cb7d8f88667b10b54ad08748c84e647f86e6063360be6a1be28033d4f359f2"}
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.525790 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-gdq6v"]
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.571624 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/74e74db5-efd0-4198-82a1-ef76d751d1de-horizon-secret-key\") pod \"horizon-75955bb549-s67lw\" (UID: \"74e74db5-efd0-4198-82a1-ef76d751d1de\") " pod="openstack/horizon-75955bb549-s67lw"
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.571678 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fm4g\" (UniqueName: \"kubernetes.io/projected/74e74db5-efd0-4198-82a1-ef76d751d1de-kube-api-access-2fm4g\") pod \"horizon-75955bb549-s67lw\" (UID: \"74e74db5-efd0-4198-82a1-ef76d751d1de\") " pod="openstack/horizon-75955bb549-s67lw"
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.571704 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/74e74db5-efd0-4198-82a1-ef76d751d1de-config-data\") pod \"horizon-75955bb549-s67lw\" (UID: \"74e74db5-efd0-4198-82a1-ef76d751d1de\") " pod="openstack/horizon-75955bb549-s67lw"
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.571851 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/74e74db5-efd0-4198-82a1-ef76d751d1de-scripts\") pod \"horizon-75955bb549-s67lw\" (UID: \"74e74db5-efd0-4198-82a1-ef76d751d1de\") " pod="openstack/horizon-75955bb549-s67lw"
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.571912 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/74e74db5-efd0-4198-82a1-ef76d751d1de-logs\") pod \"horizon-75955bb549-s67lw\" (UID: \"74e74db5-efd0-4198-82a1-ef76d751d1de\") " pod="openstack/horizon-75955bb549-s67lw"
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.590708 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-gdq6v"]
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.609078 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.609037264 podStartE2EDuration="4.609037264s" podCreationTimestamp="2025-10-11 05:07:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:07:40.532275241 +0000 UTC m=+981.428508027" watchObservedRunningTime="2025-10-11 05:07:40.609037264 +0000 UTC m=+981.505270060"
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.623346 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57c957c4ff-t96wz" podStartSLOduration=4.623294177 podStartE2EDuration="4.623294177s" podCreationTimestamp="2025-10-11 05:07:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:07:40.58924241 +0000 UTC m=+981.485475206" watchObservedRunningTime="2025-10-11 05:07:40.623294177 +0000 UTC m=+981.519526973"
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.674129 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/74e74db5-efd0-4198-82a1-ef76d751d1de-scripts\") pod \"horizon-75955bb549-s67lw\" (UID: \"74e74db5-efd0-4198-82a1-ef76d751d1de\") " pod="openstack/horizon-75955bb549-s67lw"
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.674259 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/74e74db5-efd0-4198-82a1-ef76d751d1de-logs\") pod \"horizon-75955bb549-s67lw\" (UID: \"74e74db5-efd0-4198-82a1-ef76d751d1de\") " pod="openstack/horizon-75955bb549-s67lw"
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.674301 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/74e74db5-efd0-4198-82a1-ef76d751d1de-horizon-secret-key\") pod \"horizon-75955bb549-s67lw\" (UID: \"74e74db5-efd0-4198-82a1-ef76d751d1de\") " pod="openstack/horizon-75955bb549-s67lw"
Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.674321 4651 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-2fm4g\" (UniqueName: \"kubernetes.io/projected/74e74db5-efd0-4198-82a1-ef76d751d1de-kube-api-access-2fm4g\") pod \"horizon-75955bb549-s67lw\" (UID: \"74e74db5-efd0-4198-82a1-ef76d751d1de\") " pod="openstack/horizon-75955bb549-s67lw" Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.674341 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/74e74db5-efd0-4198-82a1-ef76d751d1de-config-data\") pod \"horizon-75955bb549-s67lw\" (UID: \"74e74db5-efd0-4198-82a1-ef76d751d1de\") " pod="openstack/horizon-75955bb549-s67lw" Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.676759 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/74e74db5-efd0-4198-82a1-ef76d751d1de-logs\") pod \"horizon-75955bb549-s67lw\" (UID: \"74e74db5-efd0-4198-82a1-ef76d751d1de\") " pod="openstack/horizon-75955bb549-s67lw" Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.678371 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/74e74db5-efd0-4198-82a1-ef76d751d1de-scripts\") pod \"horizon-75955bb549-s67lw\" (UID: \"74e74db5-efd0-4198-82a1-ef76d751d1de\") " pod="openstack/horizon-75955bb549-s67lw" Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.679498 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/74e74db5-efd0-4198-82a1-ef76d751d1de-config-data\") pod \"horizon-75955bb549-s67lw\" (UID: \"74e74db5-efd0-4198-82a1-ef76d751d1de\") " pod="openstack/horizon-75955bb549-s67lw" Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.689038 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/74e74db5-efd0-4198-82a1-ef76d751d1de-horizon-secret-key\") pod \"horizon-75955bb549-s67lw\" (UID: \"74e74db5-efd0-4198-82a1-ef76d751d1de\") " pod="openstack/horizon-75955bb549-s67lw" Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.694832 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fm4g\" (UniqueName: \"kubernetes.io/projected/74e74db5-efd0-4198-82a1-ef76d751d1de-kube-api-access-2fm4g\") pod \"horizon-75955bb549-s67lw\" (UID: \"74e74db5-efd0-4198-82a1-ef76d751d1de\") " pod="openstack/horizon-75955bb549-s67lw" Oct 11 05:07:40 crc kubenswrapper[4651]: I1011 05:07:40.733539 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-75955bb549-s67lw" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.200217 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-75955bb549-s67lw"] Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.266448 4651 util.go:48] "No ready sandbox for pod can be found. 
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.394376 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6a85fe60-6391-47c1-8f31-803a742a188a-scripts\") pod \"6a85fe60-6391-47c1-8f31-803a742a188a\" (UID: \"6a85fe60-6391-47c1-8f31-803a742a188a\") "
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.394698 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6a85fe60-6391-47c1-8f31-803a742a188a-httpd-run\") pod \"6a85fe60-6391-47c1-8f31-803a742a188a\" (UID: \"6a85fe60-6391-47c1-8f31-803a742a188a\") "
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.394746 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"6a85fe60-6391-47c1-8f31-803a742a188a\" (UID: \"6a85fe60-6391-47c1-8f31-803a742a188a\") "
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.394801 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a85fe60-6391-47c1-8f31-803a742a188a-public-tls-certs\") pod \"6a85fe60-6391-47c1-8f31-803a742a188a\" (UID: \"6a85fe60-6391-47c1-8f31-803a742a188a\") "
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.394832 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a85fe60-6391-47c1-8f31-803a742a188a-config-data\") pod \"6a85fe60-6391-47c1-8f31-803a742a188a\" (UID: \"6a85fe60-6391-47c1-8f31-803a742a188a\") "
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.394854 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a85fe60-6391-47c1-8f31-803a742a188a-logs\") pod \"6a85fe60-6391-47c1-8f31-803a742a188a\" (UID: \"6a85fe60-6391-47c1-8f31-803a742a188a\") "
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.394875 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rbjq6\" (UniqueName: \"kubernetes.io/projected/6a85fe60-6391-47c1-8f31-803a742a188a-kube-api-access-rbjq6\") pod \"6a85fe60-6391-47c1-8f31-803a742a188a\" (UID: \"6a85fe60-6391-47c1-8f31-803a742a188a\") "
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.394993 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a85fe60-6391-47c1-8f31-803a742a188a-combined-ca-bundle\") pod \"6a85fe60-6391-47c1-8f31-803a742a188a\" (UID: \"6a85fe60-6391-47c1-8f31-803a742a188a\") "
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.397218 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6a85fe60-6391-47c1-8f31-803a742a188a-logs" (OuterVolumeSpecName: "logs") pod "6a85fe60-6391-47c1-8f31-803a742a188a" (UID: "6a85fe60-6391-47c1-8f31-803a742a188a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.397266 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6a85fe60-6391-47c1-8f31-803a742a188a-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "6a85fe60-6391-47c1-8f31-803a742a188a" (UID: "6a85fe60-6391-47c1-8f31-803a742a188a"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.399025 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a85fe60-6391-47c1-8f31-803a742a188a-scripts" (OuterVolumeSpecName: "scripts") pod "6a85fe60-6391-47c1-8f31-803a742a188a" (UID: "6a85fe60-6391-47c1-8f31-803a742a188a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.400478 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "6a85fe60-6391-47c1-8f31-803a742a188a" (UID: "6a85fe60-6391-47c1-8f31-803a742a188a"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.404116 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a85fe60-6391-47c1-8f31-803a742a188a-kube-api-access-rbjq6" (OuterVolumeSpecName: "kube-api-access-rbjq6") pod "6a85fe60-6391-47c1-8f31-803a742a188a" (UID: "6a85fe60-6391-47c1-8f31-803a742a188a"). InnerVolumeSpecName "kube-api-access-rbjq6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.425673 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a85fe60-6391-47c1-8f31-803a742a188a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6a85fe60-6391-47c1-8f31-803a742a188a" (UID: "6a85fe60-6391-47c1-8f31-803a742a188a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.456002 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a85fe60-6391-47c1-8f31-803a742a188a-config-data" (OuterVolumeSpecName: "config-data") pod "6a85fe60-6391-47c1-8f31-803a742a188a" (UID: "6a85fe60-6391-47c1-8f31-803a742a188a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.456052 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a85fe60-6391-47c1-8f31-803a742a188a-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "6a85fe60-6391-47c1-8f31-803a742a188a" (UID: "6a85fe60-6391-47c1-8f31-803a742a188a"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.497476 4651 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a85fe60-6391-47c1-8f31-803a742a188a-public-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.497512 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a85fe60-6391-47c1-8f31-803a742a188a-config-data\") on node \"crc\" DevicePath \"\""
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.497522 4651 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a85fe60-6391-47c1-8f31-803a742a188a-logs\") on node \"crc\" DevicePath \"\""
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.497531 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rbjq6\" (UniqueName: \"kubernetes.io/projected/6a85fe60-6391-47c1-8f31-803a742a188a-kube-api-access-rbjq6\") on node \"crc\" DevicePath \"\""
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.497541 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a85fe60-6391-47c1-8f31-803a742a188a-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.497548 4651 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6a85fe60-6391-47c1-8f31-803a742a188a-scripts\") on node \"crc\" DevicePath \"\""
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.497556 4651 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6a85fe60-6391-47c1-8f31-803a742a188a-httpd-run\") on node \"crc\" DevicePath \"\""
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.497589 4651 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" "
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.519128 4651 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc"
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.525953 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.526369 4651 generic.go:334] "Generic (PLEG): container finished" podID="6a85fe60-6391-47c1-8f31-803a742a188a" containerID="43370a1e523b69ae25c3f37878bae64097cdb6429a7989d07ab68ed8bfc6f832" exitCode=143
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.526397 4651 generic.go:334] "Generic (PLEG): container finished" podID="6a85fe60-6391-47c1-8f31-803a742a188a" containerID="364c735ce9a35a6c40d89769547a9abdb6faacdb3527c3a12b8695f37b1b0cda" exitCode=143
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.526474 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6a85fe60-6391-47c1-8f31-803a742a188a","Type":"ContainerDied","Data":"43370a1e523b69ae25c3f37878bae64097cdb6429a7989d07ab68ed8bfc6f832"}
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.526505 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6a85fe60-6391-47c1-8f31-803a742a188a","Type":"ContainerDied","Data":"364c735ce9a35a6c40d89769547a9abdb6faacdb3527c3a12b8695f37b1b0cda"}
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.526515 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6a85fe60-6391-47c1-8f31-803a742a188a","Type":"ContainerDied","Data":"8765069af190c8e8ee1ef9958d9cae748d35209a2a19ac8617512c46b82b81d1"}
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.526529 4651 scope.go:117] "RemoveContainer" containerID="43370a1e523b69ae25c3f37878bae64097cdb6429a7989d07ab68ed8bfc6f832"
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.528772 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-75955bb549-s67lw" event={"ID":"74e74db5-efd0-4198-82a1-ef76d751d1de","Type":"ContainerStarted","Data":"6eda59e5432c906cfd923da5195c9794867b3062565e7df3fd54edf56d9fdbf6"}
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.531756 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"42822e28-23dc-4d16-a96b-bdef97cd87b4","Type":"ContainerStarted","Data":"980fd46f041c4e96e3e86fb3bde80c77b81261d420535c38d63a196a86b88e5e"}
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.531935 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="42822e28-23dc-4d16-a96b-bdef97cd87b4" containerName="glance-log" containerID="cri-o://dd08989de1f1be8d0b72125ccc1ec1502812a5ebad8ea6145fc6cf2c3a55f7b8" gracePeriod=30
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.532052 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="42822e28-23dc-4d16-a96b-bdef97cd87b4" containerName="glance-httpd" containerID="cri-o://980fd46f041c4e96e3e86fb3bde80c77b81261d420535c38d63a196a86b88e5e" gracePeriod=30
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.533593 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57c957c4ff-t96wz"
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.556209 4651 scope.go:117] "RemoveContainer" containerID="364c735ce9a35a6c40d89769547a9abdb6faacdb3527c3a12b8695f37b1b0cda"
Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.559638 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.559618153 podStartE2EDuration="4.559618153s" podCreationTimestamp="2025-10-11 05:07:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:07:41.55832515 +0000 UTC m=+982.454557956" watchObservedRunningTime="2025-10-11 05:07:41.559618153 +0000 UTC m=+982.455850939"
pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.559618153 podStartE2EDuration="4.559618153s" podCreationTimestamp="2025-10-11 05:07:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:07:41.55832515 +0000 UTC m=+982.454557956" watchObservedRunningTime="2025-10-11 05:07:41.559618153 +0000 UTC m=+982.455850939" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.593845 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.599112 4651 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.599553 4651 scope.go:117] "RemoveContainer" containerID="43370a1e523b69ae25c3f37878bae64097cdb6429a7989d07ab68ed8bfc6f832" Oct 11 05:07:41 crc kubenswrapper[4651]: E1011 05:07:41.604435 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43370a1e523b69ae25c3f37878bae64097cdb6429a7989d07ab68ed8bfc6f832\": container with ID starting with 43370a1e523b69ae25c3f37878bae64097cdb6429a7989d07ab68ed8bfc6f832 not found: ID does not exist" containerID="43370a1e523b69ae25c3f37878bae64097cdb6429a7989d07ab68ed8bfc6f832" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.604475 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43370a1e523b69ae25c3f37878bae64097cdb6429a7989d07ab68ed8bfc6f832"} err="failed to get container status \"43370a1e523b69ae25c3f37878bae64097cdb6429a7989d07ab68ed8bfc6f832\": rpc error: code = NotFound desc = could not find container \"43370a1e523b69ae25c3f37878bae64097cdb6429a7989d07ab68ed8bfc6f832\": container with ID starting with 43370a1e523b69ae25c3f37878bae64097cdb6429a7989d07ab68ed8bfc6f832 not found: ID does not exist" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.604498 4651 scope.go:117] "RemoveContainer" containerID="364c735ce9a35a6c40d89769547a9abdb6faacdb3527c3a12b8695f37b1b0cda" Oct 11 05:07:41 crc kubenswrapper[4651]: E1011 05:07:41.605048 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"364c735ce9a35a6c40d89769547a9abdb6faacdb3527c3a12b8695f37b1b0cda\": container with ID starting with 364c735ce9a35a6c40d89769547a9abdb6faacdb3527c3a12b8695f37b1b0cda not found: ID does not exist" containerID="364c735ce9a35a6c40d89769547a9abdb6faacdb3527c3a12b8695f37b1b0cda" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.605086 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"364c735ce9a35a6c40d89769547a9abdb6faacdb3527c3a12b8695f37b1b0cda"} err="failed to get container status \"364c735ce9a35a6c40d89769547a9abdb6faacdb3527c3a12b8695f37b1b0cda\": rpc error: code = NotFound desc = could not find container \"364c735ce9a35a6c40d89769547a9abdb6faacdb3527c3a12b8695f37b1b0cda\": container with ID starting with 364c735ce9a35a6c40d89769547a9abdb6faacdb3527c3a12b8695f37b1b0cda not found: ID does not exist" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.605151 4651 scope.go:117] "RemoveContainer" containerID="43370a1e523b69ae25c3f37878bae64097cdb6429a7989d07ab68ed8bfc6f832" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 
05:07:41.605961 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43370a1e523b69ae25c3f37878bae64097cdb6429a7989d07ab68ed8bfc6f832"} err="failed to get container status \"43370a1e523b69ae25c3f37878bae64097cdb6429a7989d07ab68ed8bfc6f832\": rpc error: code = NotFound desc = could not find container \"43370a1e523b69ae25c3f37878bae64097cdb6429a7989d07ab68ed8bfc6f832\": container with ID starting with 43370a1e523b69ae25c3f37878bae64097cdb6429a7989d07ab68ed8bfc6f832 not found: ID does not exist" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.605980 4651 scope.go:117] "RemoveContainer" containerID="364c735ce9a35a6c40d89769547a9abdb6faacdb3527c3a12b8695f37b1b0cda" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.611133 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.617898 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"364c735ce9a35a6c40d89769547a9abdb6faacdb3527c3a12b8695f37b1b0cda"} err="failed to get container status \"364c735ce9a35a6c40d89769547a9abdb6faacdb3527c3a12b8695f37b1b0cda\": rpc error: code = NotFound desc = could not find container \"364c735ce9a35a6c40d89769547a9abdb6faacdb3527c3a12b8695f37b1b0cda\": container with ID starting with 364c735ce9a35a6c40d89769547a9abdb6faacdb3527c3a12b8695f37b1b0cda not found: ID does not exist" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.619828 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 11 05:07:41 crc kubenswrapper[4651]: E1011 05:07:41.620358 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a85fe60-6391-47c1-8f31-803a742a188a" containerName="glance-log" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.620374 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a85fe60-6391-47c1-8f31-803a742a188a" containerName="glance-log" Oct 11 05:07:41 crc kubenswrapper[4651]: E1011 05:07:41.620388 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a85fe60-6391-47c1-8f31-803a742a188a" containerName="glance-httpd" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.620394 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a85fe60-6391-47c1-8f31-803a742a188a" containerName="glance-httpd" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.623004 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a85fe60-6391-47c1-8f31-803a742a188a" containerName="glance-log" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.623046 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a85fe60-6391-47c1-8f31-803a742a188a" containerName="glance-httpd" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.623980 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.628181 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.629231 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.657851 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.701431 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-config-data\") pod \"glance-default-external-api-0\" (UID: \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\") " pod="openstack/glance-default-external-api-0" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.701524 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\") " pod="openstack/glance-default-external-api-0" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.701579 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxmqr\" (UniqueName: \"kubernetes.io/projected/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-kube-api-access-rxmqr\") pod \"glance-default-external-api-0\" (UID: \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\") " pod="openstack/glance-default-external-api-0" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.701605 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\") " pod="openstack/glance-default-external-api-0" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.701679 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-scripts\") pod \"glance-default-external-api-0\" (UID: \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\") " pod="openstack/glance-default-external-api-0" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.701700 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\") " pod="openstack/glance-default-external-api-0" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.701723 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-logs\") pod \"glance-default-external-api-0\" (UID: \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\") " pod="openstack/glance-default-external-api-0" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.701744 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\") " pod="openstack/glance-default-external-api-0" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.807499 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-scripts\") pod \"glance-default-external-api-0\" (UID: \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\") " pod="openstack/glance-default-external-api-0" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.807585 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\") " pod="openstack/glance-default-external-api-0" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.807660 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-logs\") pod \"glance-default-external-api-0\" (UID: \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\") " pod="openstack/glance-default-external-api-0" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.807696 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\") " pod="openstack/glance-default-external-api-0" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.808923 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-config-data\") pod \"glance-default-external-api-0\" (UID: \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\") " pod="openstack/glance-default-external-api-0" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.808996 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-logs\") pod \"glance-default-external-api-0\" (UID: \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\") " pod="openstack/glance-default-external-api-0" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.809033 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\") " pod="openstack/glance-default-external-api-0" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.809141 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxmqr\" (UniqueName: \"kubernetes.io/projected/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-kube-api-access-rxmqr\") pod \"glance-default-external-api-0\" (UID: \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\") " pod="openstack/glance-default-external-api-0" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.809186 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\") " pod="openstack/glance-default-external-api-0" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.809649 4651 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.809853 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\") " pod="openstack/glance-default-external-api-0" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.811944 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-scripts\") pod \"glance-default-external-api-0\" (UID: \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\") " pod="openstack/glance-default-external-api-0" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.812591 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-config-data\") pod \"glance-default-external-api-0\" (UID: \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\") " pod="openstack/glance-default-external-api-0" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.818075 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\") " pod="openstack/glance-default-external-api-0" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.820189 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\") " pod="openstack/glance-default-external-api-0" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.835133 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxmqr\" (UniqueName: \"kubernetes.io/projected/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-kube-api-access-rxmqr\") pod \"glance-default-external-api-0\" (UID: \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\") " pod="openstack/glance-default-external-api-0" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.853448 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\") " pod="openstack/glance-default-external-api-0" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.904104 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a85fe60-6391-47c1-8f31-803a742a188a" path="/var/lib/kubelet/pods/6a85fe60-6391-47c1-8f31-803a742a188a/volumes" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.905170 4651 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80d6eea1-c7e1-4f5e-aa4a-ec74723db231" path="/var/lib/kubelet/pods/80d6eea1-c7e1-4f5e-aa4a-ec74723db231/volumes" Oct 11 05:07:41 crc kubenswrapper[4651]: I1011 05:07:41.963548 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.192939 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.321106 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"42822e28-23dc-4d16-a96b-bdef97cd87b4\" (UID: \"42822e28-23dc-4d16-a96b-bdef97cd87b4\") " Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.321191 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kttnp\" (UniqueName: \"kubernetes.io/projected/42822e28-23dc-4d16-a96b-bdef97cd87b4-kube-api-access-kttnp\") pod \"42822e28-23dc-4d16-a96b-bdef97cd87b4\" (UID: \"42822e28-23dc-4d16-a96b-bdef97cd87b4\") " Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.321274 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42822e28-23dc-4d16-a96b-bdef97cd87b4-config-data\") pod \"42822e28-23dc-4d16-a96b-bdef97cd87b4\" (UID: \"42822e28-23dc-4d16-a96b-bdef97cd87b4\") " Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.321367 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/42822e28-23dc-4d16-a96b-bdef97cd87b4-httpd-run\") pod \"42822e28-23dc-4d16-a96b-bdef97cd87b4\" (UID: \"42822e28-23dc-4d16-a96b-bdef97cd87b4\") " Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.321405 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/42822e28-23dc-4d16-a96b-bdef97cd87b4-logs\") pod \"42822e28-23dc-4d16-a96b-bdef97cd87b4\" (UID: \"42822e28-23dc-4d16-a96b-bdef97cd87b4\") " Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.321504 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/42822e28-23dc-4d16-a96b-bdef97cd87b4-internal-tls-certs\") pod \"42822e28-23dc-4d16-a96b-bdef97cd87b4\" (UID: \"42822e28-23dc-4d16-a96b-bdef97cd87b4\") " Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.321542 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42822e28-23dc-4d16-a96b-bdef97cd87b4-combined-ca-bundle\") pod \"42822e28-23dc-4d16-a96b-bdef97cd87b4\" (UID: \"42822e28-23dc-4d16-a96b-bdef97cd87b4\") " Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.321565 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/42822e28-23dc-4d16-a96b-bdef97cd87b4-scripts\") pod \"42822e28-23dc-4d16-a96b-bdef97cd87b4\" (UID: \"42822e28-23dc-4d16-a96b-bdef97cd87b4\") " Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.323158 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/42822e28-23dc-4d16-a96b-bdef97cd87b4-logs" (OuterVolumeSpecName: 
"logs") pod "42822e28-23dc-4d16-a96b-bdef97cd87b4" (UID: "42822e28-23dc-4d16-a96b-bdef97cd87b4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.323747 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/42822e28-23dc-4d16-a96b-bdef97cd87b4-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "42822e28-23dc-4d16-a96b-bdef97cd87b4" (UID: "42822e28-23dc-4d16-a96b-bdef97cd87b4"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.326267 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "glance") pod "42822e28-23dc-4d16-a96b-bdef97cd87b4" (UID: "42822e28-23dc-4d16-a96b-bdef97cd87b4"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.327512 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42822e28-23dc-4d16-a96b-bdef97cd87b4-scripts" (OuterVolumeSpecName: "scripts") pod "42822e28-23dc-4d16-a96b-bdef97cd87b4" (UID: "42822e28-23dc-4d16-a96b-bdef97cd87b4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.330506 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42822e28-23dc-4d16-a96b-bdef97cd87b4-kube-api-access-kttnp" (OuterVolumeSpecName: "kube-api-access-kttnp") pod "42822e28-23dc-4d16-a96b-bdef97cd87b4" (UID: "42822e28-23dc-4d16-a96b-bdef97cd87b4"). InnerVolumeSpecName "kube-api-access-kttnp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.353416 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42822e28-23dc-4d16-a96b-bdef97cd87b4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "42822e28-23dc-4d16-a96b-bdef97cd87b4" (UID: "42822e28-23dc-4d16-a96b-bdef97cd87b4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.379737 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42822e28-23dc-4d16-a96b-bdef97cd87b4-config-data" (OuterVolumeSpecName: "config-data") pod "42822e28-23dc-4d16-a96b-bdef97cd87b4" (UID: "42822e28-23dc-4d16-a96b-bdef97cd87b4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.381928 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42822e28-23dc-4d16-a96b-bdef97cd87b4-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "42822e28-23dc-4d16-a96b-bdef97cd87b4" (UID: "42822e28-23dc-4d16-a96b-bdef97cd87b4"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.424171 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42822e28-23dc-4d16-a96b-bdef97cd87b4-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.424210 4651 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/42822e28-23dc-4d16-a96b-bdef97cd87b4-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.424223 4651 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/42822e28-23dc-4d16-a96b-bdef97cd87b4-logs\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.424235 4651 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/42822e28-23dc-4d16-a96b-bdef97cd87b4-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.424247 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42822e28-23dc-4d16-a96b-bdef97cd87b4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.424257 4651 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/42822e28-23dc-4d16-a96b-bdef97cd87b4-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.424293 4651 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.424302 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kttnp\" (UniqueName: \"kubernetes.io/projected/42822e28-23dc-4d16-a96b-bdef97cd87b4-kube-api-access-kttnp\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.449880 4651 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.525793 4651 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.553032 4651 generic.go:334] "Generic (PLEG): container finished" podID="42822e28-23dc-4d16-a96b-bdef97cd87b4" containerID="980fd46f041c4e96e3e86fb3bde80c77b81261d420535c38d63a196a86b88e5e" exitCode=0 Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.553073 4651 generic.go:334] "Generic (PLEG): container finished" podID="42822e28-23dc-4d16-a96b-bdef97cd87b4" containerID="dd08989de1f1be8d0b72125ccc1ec1502812a5ebad8ea6145fc6cf2c3a55f7b8" exitCode=143 Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.553089 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.553130 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"42822e28-23dc-4d16-a96b-bdef97cd87b4","Type":"ContainerDied","Data":"980fd46f041c4e96e3e86fb3bde80c77b81261d420535c38d63a196a86b88e5e"} Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.553236 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"42822e28-23dc-4d16-a96b-bdef97cd87b4","Type":"ContainerDied","Data":"dd08989de1f1be8d0b72125ccc1ec1502812a5ebad8ea6145fc6cf2c3a55f7b8"} Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.553250 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"42822e28-23dc-4d16-a96b-bdef97cd87b4","Type":"ContainerDied","Data":"065b75751b1cfac0965af70f6c38aeffd04cc9d151c8e04f8a9147ebb29f6e7b"} Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.553266 4651 scope.go:117] "RemoveContainer" containerID="980fd46f041c4e96e3e86fb3bde80c77b81261d420535c38d63a196a86b88e5e" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.556788 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.558202 4651 generic.go:334] "Generic (PLEG): container finished" podID="81c9e0ff-d205-4fa2-9606-5ba89c367008" containerID="9afbbc7cb4c14d6ce0164bfe604ff8f34b985ca3fbb40debe1e9f79f85e3940b" exitCode=0 Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.558262 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-kb7c9" event={"ID":"81c9e0ff-d205-4fa2-9606-5ba89c367008","Type":"ContainerDied","Data":"9afbbc7cb4c14d6ce0164bfe604ff8f34b985ca3fbb40debe1e9f79f85e3940b"} Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.608043 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.643796 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.666088 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 11 05:07:42 crc kubenswrapper[4651]: E1011 05:07:42.667756 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42822e28-23dc-4d16-a96b-bdef97cd87b4" containerName="glance-httpd" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.667774 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="42822e28-23dc-4d16-a96b-bdef97cd87b4" containerName="glance-httpd" Oct 11 05:07:42 crc kubenswrapper[4651]: E1011 05:07:42.668049 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42822e28-23dc-4d16-a96b-bdef97cd87b4" containerName="glance-log" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.668075 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="42822e28-23dc-4d16-a96b-bdef97cd87b4" containerName="glance-log" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.668716 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="42822e28-23dc-4d16-a96b-bdef97cd87b4" containerName="glance-httpd" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.668986 4651 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="42822e28-23dc-4d16-a96b-bdef97cd87b4" containerName="glance-log" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.747460 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.747576 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.750063 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.750581 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.840046 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-internal-api-0\" (UID: \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.840098 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.840131 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-logs\") pod \"glance-default-internal-api-0\" (UID: \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.840238 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.840264 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.840289 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8vhg\" (UniqueName: \"kubernetes.io/projected/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-kube-api-access-h8vhg\") pod \"glance-default-internal-api-0\" (UID: \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.840351 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\") " 
pod="openstack/glance-default-internal-api-0" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.840533 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.943513 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-internal-api-0\" (UID: \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.943575 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.943626 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-logs\") pod \"glance-default-internal-api-0\" (UID: \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.943735 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.943764 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.943794 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8vhg\" (UniqueName: \"kubernetes.io/projected/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-kube-api-access-h8vhg\") pod \"glance-default-internal-api-0\" (UID: \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.943843 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.943893 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:07:42 crc 
kubenswrapper[4651]: I1011 05:07:42.944176 4651 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-internal-api-0\" (UID: \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/glance-default-internal-api-0" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.944455 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-logs\") pod \"glance-default-internal-api-0\" (UID: \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.944634 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.956680 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.962214 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.976883 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.979619 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:07:42 crc kubenswrapper[4651]: I1011 05:07:42.991368 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8vhg\" (UniqueName: \"kubernetes.io/projected/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-kube-api-access-h8vhg\") pod \"glance-default-internal-api-0\" (UID: \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:07:43 crc kubenswrapper[4651]: I1011 05:07:43.003204 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-internal-api-0\" (UID: \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:07:43 crc kubenswrapper[4651]: I1011 05:07:43.065596 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 11 05:07:43 crc kubenswrapper[4651]: I1011 05:07:43.889084 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="42822e28-23dc-4d16-a96b-bdef97cd87b4" path="/var/lib/kubelet/pods/42822e28-23dc-4d16-a96b-bdef97cd87b4/volumes" Oct 11 05:07:45 crc kubenswrapper[4651]: I1011 05:07:45.770644 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-69899bbb49-h5pnl"] Oct 11 05:07:45 crc kubenswrapper[4651]: I1011 05:07:45.826755 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-68976f6bc6-9jl66"] Oct 11 05:07:45 crc kubenswrapper[4651]: I1011 05:07:45.828213 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-68976f6bc6-9jl66" Oct 11 05:07:45 crc kubenswrapper[4651]: I1011 05:07:45.830587 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Oct 11 05:07:45 crc kubenswrapper[4651]: I1011 05:07:45.838690 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 11 05:07:45 crc kubenswrapper[4651]: I1011 05:07:45.844997 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-68976f6bc6-9jl66"] Oct 11 05:07:45 crc kubenswrapper[4651]: I1011 05:07:45.927981 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-logs\") pod \"horizon-68976f6bc6-9jl66\" (UID: \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\") " pod="openstack/horizon-68976f6bc6-9jl66" Oct 11 05:07:45 crc kubenswrapper[4651]: I1011 05:07:45.928033 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-config-data\") pod \"horizon-68976f6bc6-9jl66\" (UID: \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\") " pod="openstack/horizon-68976f6bc6-9jl66" Oct 11 05:07:45 crc kubenswrapper[4651]: I1011 05:07:45.928091 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-horizon-secret-key\") pod \"horizon-68976f6bc6-9jl66\" (UID: \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\") " pod="openstack/horizon-68976f6bc6-9jl66" Oct 11 05:07:45 crc kubenswrapper[4651]: I1011 05:07:45.928141 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-scripts\") pod \"horizon-68976f6bc6-9jl66\" (UID: \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\") " pod="openstack/horizon-68976f6bc6-9jl66" Oct 11 05:07:45 crc kubenswrapper[4651]: I1011 05:07:45.928174 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-combined-ca-bundle\") pod \"horizon-68976f6bc6-9jl66\" (UID: \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\") " pod="openstack/horizon-68976f6bc6-9jl66" Oct 11 05:07:45 crc kubenswrapper[4651]: I1011 05:07:45.928203 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-horizon-tls-certs\") 
pod \"horizon-68976f6bc6-9jl66\" (UID: \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\") " pod="openstack/horizon-68976f6bc6-9jl66" Oct 11 05:07:45 crc kubenswrapper[4651]: I1011 05:07:45.928316 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sz4x9\" (UniqueName: \"kubernetes.io/projected/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-kube-api-access-sz4x9\") pod \"horizon-68976f6bc6-9jl66\" (UID: \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\") " pod="openstack/horizon-68976f6bc6-9jl66" Oct 11 05:07:45 crc kubenswrapper[4651]: I1011 05:07:45.995273 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 11 05:07:45 crc kubenswrapper[4651]: I1011 05:07:45.995329 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-75955bb549-s67lw"] Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.007706 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5f7d84485b-zb5s7"] Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.011038 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5f7d84485b-zb5s7" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.033295 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-logs\") pod \"horizon-68976f6bc6-9jl66\" (UID: \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\") " pod="openstack/horizon-68976f6bc6-9jl66" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.033359 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-config-data\") pod \"horizon-68976f6bc6-9jl66\" (UID: \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\") " pod="openstack/horizon-68976f6bc6-9jl66" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.033475 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-horizon-secret-key\") pod \"horizon-68976f6bc6-9jl66\" (UID: \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\") " pod="openstack/horizon-68976f6bc6-9jl66" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.033602 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-scripts\") pod \"horizon-68976f6bc6-9jl66\" (UID: \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\") " pod="openstack/horizon-68976f6bc6-9jl66" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.033731 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-combined-ca-bundle\") pod \"horizon-68976f6bc6-9jl66\" (UID: \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\") " pod="openstack/horizon-68976f6bc6-9jl66" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.034239 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-horizon-tls-certs\") pod \"horizon-68976f6bc6-9jl66\" (UID: \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\") " pod="openstack/horizon-68976f6bc6-9jl66" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.035050 4651 kubelet.go:2428] "SyncLoop 
UPDATE" source="api" pods=["openstack/horizon-5f7d84485b-zb5s7"] Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.035128 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sz4x9\" (UniqueName: \"kubernetes.io/projected/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-kube-api-access-sz4x9\") pod \"horizon-68976f6bc6-9jl66\" (UID: \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\") " pod="openstack/horizon-68976f6bc6-9jl66" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.036359 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-logs\") pod \"horizon-68976f6bc6-9jl66\" (UID: \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\") " pod="openstack/horizon-68976f6bc6-9jl66" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.036812 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-scripts\") pod \"horizon-68976f6bc6-9jl66\" (UID: \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\") " pod="openstack/horizon-68976f6bc6-9jl66" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.038700 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-config-data\") pod \"horizon-68976f6bc6-9jl66\" (UID: \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\") " pod="openstack/horizon-68976f6bc6-9jl66" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.042221 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-horizon-secret-key\") pod \"horizon-68976f6bc6-9jl66\" (UID: \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\") " pod="openstack/horizon-68976f6bc6-9jl66" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.042338 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-combined-ca-bundle\") pod \"horizon-68976f6bc6-9jl66\" (UID: \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\") " pod="openstack/horizon-68976f6bc6-9jl66" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.054104 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sz4x9\" (UniqueName: \"kubernetes.io/projected/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-kube-api-access-sz4x9\") pod \"horizon-68976f6bc6-9jl66\" (UID: \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\") " pod="openstack/horizon-68976f6bc6-9jl66" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.052486 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-horizon-tls-certs\") pod \"horizon-68976f6bc6-9jl66\" (UID: \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\") " pod="openstack/horizon-68976f6bc6-9jl66" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.136909 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7-scripts\") pod \"horizon-5f7d84485b-zb5s7\" (UID: \"a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7\") " pod="openstack/horizon-5f7d84485b-zb5s7" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.137005 4651 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7-logs\") pod \"horizon-5f7d84485b-zb5s7\" (UID: \"a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7\") " pod="openstack/horizon-5f7d84485b-zb5s7" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.137024 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7-combined-ca-bundle\") pod \"horizon-5f7d84485b-zb5s7\" (UID: \"a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7\") " pod="openstack/horizon-5f7d84485b-zb5s7" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.137044 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7-config-data\") pod \"horizon-5f7d84485b-zb5s7\" (UID: \"a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7\") " pod="openstack/horizon-5f7d84485b-zb5s7" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.137064 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7-horizon-secret-key\") pod \"horizon-5f7d84485b-zb5s7\" (UID: \"a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7\") " pod="openstack/horizon-5f7d84485b-zb5s7" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.137082 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wzvt\" (UniqueName: \"kubernetes.io/projected/a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7-kube-api-access-6wzvt\") pod \"horizon-5f7d84485b-zb5s7\" (UID: \"a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7\") " pod="openstack/horizon-5f7d84485b-zb5s7" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.137099 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7-horizon-tls-certs\") pod \"horizon-5f7d84485b-zb5s7\" (UID: \"a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7\") " pod="openstack/horizon-5f7d84485b-zb5s7" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.193094 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-68976f6bc6-9jl66" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.239858 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7-scripts\") pod \"horizon-5f7d84485b-zb5s7\" (UID: \"a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7\") " pod="openstack/horizon-5f7d84485b-zb5s7" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.240002 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7-logs\") pod \"horizon-5f7d84485b-zb5s7\" (UID: \"a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7\") " pod="openstack/horizon-5f7d84485b-zb5s7" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.240032 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7-combined-ca-bundle\") pod \"horizon-5f7d84485b-zb5s7\" (UID: \"a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7\") " pod="openstack/horizon-5f7d84485b-zb5s7" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.240516 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7-logs\") pod \"horizon-5f7d84485b-zb5s7\" (UID: \"a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7\") " pod="openstack/horizon-5f7d84485b-zb5s7" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.240670 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7-scripts\") pod \"horizon-5f7d84485b-zb5s7\" (UID: \"a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7\") " pod="openstack/horizon-5f7d84485b-zb5s7" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.242505 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7-config-data\") pod \"horizon-5f7d84485b-zb5s7\" (UID: \"a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7\") " pod="openstack/horizon-5f7d84485b-zb5s7" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.243710 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7-config-data\") pod \"horizon-5f7d84485b-zb5s7\" (UID: \"a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7\") " pod="openstack/horizon-5f7d84485b-zb5s7" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.243783 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7-horizon-secret-key\") pod \"horizon-5f7d84485b-zb5s7\" (UID: \"a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7\") " pod="openstack/horizon-5f7d84485b-zb5s7" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.244131 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wzvt\" (UniqueName: \"kubernetes.io/projected/a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7-kube-api-access-6wzvt\") pod \"horizon-5f7d84485b-zb5s7\" (UID: \"a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7\") " pod="openstack/horizon-5f7d84485b-zb5s7" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.244171 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7-horizon-tls-certs\") pod \"horizon-5f7d84485b-zb5s7\" (UID: \"a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7\") " pod="openstack/horizon-5f7d84485b-zb5s7" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.245480 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7-combined-ca-bundle\") pod \"horizon-5f7d84485b-zb5s7\" (UID: \"a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7\") " pod="openstack/horizon-5f7d84485b-zb5s7" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.247054 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7-horizon-secret-key\") pod \"horizon-5f7d84485b-zb5s7\" (UID: \"a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7\") " pod="openstack/horizon-5f7d84485b-zb5s7" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.247737 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7-horizon-tls-certs\") pod \"horizon-5f7d84485b-zb5s7\" (UID: \"a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7\") " pod="openstack/horizon-5f7d84485b-zb5s7" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.263545 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wzvt\" (UniqueName: \"kubernetes.io/projected/a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7-kube-api-access-6wzvt\") pod \"horizon-5f7d84485b-zb5s7\" (UID: \"a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7\") " pod="openstack/horizon-5f7d84485b-zb5s7" Oct 11 05:07:46 crc kubenswrapper[4651]: I1011 05:07:46.335490 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5f7d84485b-zb5s7" Oct 11 05:07:47 crc kubenswrapper[4651]: I1011 05:07:47.340036 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-57c957c4ff-t96wz" Oct 11 05:07:47 crc kubenswrapper[4651]: I1011 05:07:47.420528 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-s97wk"] Oct 11 05:07:47 crc kubenswrapper[4651]: I1011 05:07:47.420808 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk" podUID="4fb44cbf-367e-4d0e-95f6-7411e5b76817" containerName="dnsmasq-dns" containerID="cri-o://7704156b0cbfa5e6bfb0dc4ce81d0ef1948d4485444be7b2fc4b5a3768746120" gracePeriod=10 Oct 11 05:07:47 crc kubenswrapper[4651]: I1011 05:07:47.486147 4651 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk" podUID="4fb44cbf-367e-4d0e-95f6-7411e5b76817" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.126:5353: connect: connection refused" Oct 11 05:07:48 crc kubenswrapper[4651]: I1011 05:07:48.633171 4651 generic.go:334] "Generic (PLEG): container finished" podID="4fb44cbf-367e-4d0e-95f6-7411e5b76817" containerID="7704156b0cbfa5e6bfb0dc4ce81d0ef1948d4485444be7b2fc4b5a3768746120" exitCode=0 Oct 11 05:07:48 crc kubenswrapper[4651]: I1011 05:07:48.633240 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk" event={"ID":"4fb44cbf-367e-4d0e-95f6-7411e5b76817","Type":"ContainerDied","Data":"7704156b0cbfa5e6bfb0dc4ce81d0ef1948d4485444be7b2fc4b5a3768746120"} Oct 11 05:07:52 crc kubenswrapper[4651]: I1011 05:07:52.486627 4651 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk" podUID="4fb44cbf-367e-4d0e-95f6-7411e5b76817" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.126:5353: connect: connection refused" Oct 11 05:07:56 crc kubenswrapper[4651]: E1011 05:07:56.455899 4651 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-placement-api:current-podified" Oct 11 05:07:56 crc kubenswrapper[4651]: E1011 05:07:56.458311 4651 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:placement-db-sync,Image:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/placement,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:placement-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kqcc7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42482,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-db-sync-ctfxs_openstack(96c528a9-d9c6-4eec-b63f-5bba189744ae): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 11 05:07:56 crc kubenswrapper[4651]: E1011 05:07:56.461133 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/placement-db-sync-ctfxs" podUID="96c528a9-d9c6-4eec-b63f-5bba189744ae" Oct 11 05:07:56 crc kubenswrapper[4651]: E1011 05:07:56.696879 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-placement-api:current-podified\\\"\"" pod="openstack/placement-db-sync-ctfxs" podUID="96c528a9-d9c6-4eec-b63f-5bba189744ae" Oct 11 05:07:56 crc kubenswrapper[4651]: E1011 05:07:56.890832 4651 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified" Oct 11 05:07:56 crc kubenswrapper[4651]: E1011 05:07:56.890987 4651 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5f4h654hf6h5d8h5fh5d8h5b8h657h5dch67fh8dh5bbh5b9hc8h9chdhf5h587h659hf7h564h55h57fh94h5f9h6dh655h8bh86h9bh5bh7q,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-p6kxl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(16b0d24a-e647-4381-9f03-9b48c34ba52f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 11 05:07:57 crc kubenswrapper[4651]: I1011 05:07:57.486281 4651 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk" podUID="4fb44cbf-367e-4d0e-95f6-7411e5b76817" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.126:5353: connect: connection refused" Oct 11 05:07:57 crc kubenswrapper[4651]: I1011 05:07:57.486437 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk" Oct 11 05:07:58 crc kubenswrapper[4651]: W1011 05:07:58.374082 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod29d2e57f_1259_4f23_9e62_ce5d86ace5a9.slice/crio-61b32a0343d9676520f279051da4ed45bc56e2bc73b7ced4e447de8256032635 WatchSource:0}: Error finding container 61b32a0343d9676520f279051da4ed45bc56e2bc73b7ced4e447de8256032635: Status 404 returned error can't find the container with id 61b32a0343d9676520f279051da4ed45bc56e2bc73b7ced4e447de8256032635 Oct 11 05:07:58 crc kubenswrapper[4651]: E1011 05:07:58.393210 4651 log.go:32] "PullImage from image service failed" err="rpc error: code = 
Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Oct 11 05:07:58 crc kubenswrapper[4651]: E1011 05:07:58.393387 4651 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5d9h58fh5fh594h58dh565h5d7h5d9h5cfh558h68ch665h574h575h584h5bfh5c6h5f4hb8h658h64fh5d6h57bhc9h66h66bh59dh65ch5d7h686h644h545q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cq6qg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-69899bbb49-h5pnl_openstack(fda7363a-5096-4fd1-9549-dcd2e5c4f70b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 11 05:07:58 crc kubenswrapper[4651]: E1011 05:07:58.410030 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-69899bbb49-h5pnl" podUID="fda7363a-5096-4fd1-9549-dcd2e5c4f70b" Oct 11 05:07:58 crc kubenswrapper[4651]: E1011 05:07:58.412954 4651 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Oct 11 05:07:58 crc kubenswrapper[4651]: E1011 05:07:58.413124 4651 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5d8h74hbdh57fh5b7hfch69hffh6bh68fh55dh57dh5bbh5c8h5dch549hbhc6hd6h5c9h5b4h67dh596h88h588hf4h9dh644h568h66fh87h58q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-c4szm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-64c56644c5-nxbb9_openstack(786ef132-9cae-4be8-9ffa-7dc9bcd86a5a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 11 05:07:58 crc kubenswrapper[4651]: E1011 05:07:58.415217 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-64c56644c5-nxbb9" podUID="786ef132-9cae-4be8-9ffa-7dc9bcd86a5a" Oct 11 05:07:58 crc kubenswrapper[4651]: I1011 05:07:58.709450 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"29d2e57f-1259-4f23-9e62-ce5d86ace5a9","Type":"ContainerStarted","Data":"61b32a0343d9676520f279051da4ed45bc56e2bc73b7ced4e447de8256032635"} Oct 11 05:07:59 crc kubenswrapper[4651]: E1011 05:07:59.013056 4651 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Oct 11 05:07:59 crc kubenswrapper[4651]: E1011 05:07:59.013208 4651 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db 
upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tcf7n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-chq8s_openstack(4f425c3a-376b-4ba1-8066-96b2d1f21698): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 11 05:07:59 crc kubenswrapper[4651]: E1011 05:07:59.014545 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-chq8s" podUID="4f425c3a-376b-4ba1-8066-96b2d1f21698" Oct 11 05:07:59 crc kubenswrapper[4651]: I1011 05:07:59.091802 4651 util.go:48] "No ready sandbox for pod can be found. 
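
The ErrImagePull entries above all share one cause, "rpc error: code = Canceled desc = copying config: context canceled": the CRI-side image pull was cancelled mid-copy, so kubelet records the failed container start and then holds the container in ImagePullBackOff, retrying with an increasing delay rather than hammering the registry. A generic back-off loop in that spirit (a sketch only, with assumed delays, not kubelet's actual implementation or constants):

package main

import (
	"fmt"
	"time"
)

// pullWithBackoff retries pull() with a capped exponential back-off,
// mirroring the spirit of ImagePullBackOff (not kubelet's real code).
func pullWithBackoff(pull func() error, attempts int) error {
	delay, maxDelay := 10*time.Second, 5*time.Minute // assumed values
	var err error
	for i := 0; i < attempts; i++ {
		if err = pull(); err == nil {
			return nil
		}
		fmt.Printf("pull failed (%v); next attempt in %s\n", err, delay)
		time.Sleep(delay)
		if delay *= 2; delay > maxDelay {
			delay = maxDelay
		}
	}
	return err // last pull error, e.g. the Canceled RPC seen above
}
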
Need to start a new one" pod="openstack/keystone-bootstrap-kb7c9" Oct 11 05:07:59 crc kubenswrapper[4651]: I1011 05:07:59.208223 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/81c9e0ff-d205-4fa2-9606-5ba89c367008-credential-keys\") pod \"81c9e0ff-d205-4fa2-9606-5ba89c367008\" (UID: \"81c9e0ff-d205-4fa2-9606-5ba89c367008\") " Oct 11 05:07:59 crc kubenswrapper[4651]: I1011 05:07:59.208352 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81c9e0ff-d205-4fa2-9606-5ba89c367008-scripts\") pod \"81c9e0ff-d205-4fa2-9606-5ba89c367008\" (UID: \"81c9e0ff-d205-4fa2-9606-5ba89c367008\") " Oct 11 05:07:59 crc kubenswrapper[4651]: I1011 05:07:59.208443 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z4kg8\" (UniqueName: \"kubernetes.io/projected/81c9e0ff-d205-4fa2-9606-5ba89c367008-kube-api-access-z4kg8\") pod \"81c9e0ff-d205-4fa2-9606-5ba89c367008\" (UID: \"81c9e0ff-d205-4fa2-9606-5ba89c367008\") " Oct 11 05:07:59 crc kubenswrapper[4651]: I1011 05:07:59.208489 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81c9e0ff-d205-4fa2-9606-5ba89c367008-combined-ca-bundle\") pod \"81c9e0ff-d205-4fa2-9606-5ba89c367008\" (UID: \"81c9e0ff-d205-4fa2-9606-5ba89c367008\") " Oct 11 05:07:59 crc kubenswrapper[4651]: I1011 05:07:59.208564 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81c9e0ff-d205-4fa2-9606-5ba89c367008-config-data\") pod \"81c9e0ff-d205-4fa2-9606-5ba89c367008\" (UID: \"81c9e0ff-d205-4fa2-9606-5ba89c367008\") " Oct 11 05:07:59 crc kubenswrapper[4651]: I1011 05:07:59.208600 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/81c9e0ff-d205-4fa2-9606-5ba89c367008-fernet-keys\") pod \"81c9e0ff-d205-4fa2-9606-5ba89c367008\" (UID: \"81c9e0ff-d205-4fa2-9606-5ba89c367008\") " Oct 11 05:07:59 crc kubenswrapper[4651]: I1011 05:07:59.214994 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81c9e0ff-d205-4fa2-9606-5ba89c367008-scripts" (OuterVolumeSpecName: "scripts") pod "81c9e0ff-d205-4fa2-9606-5ba89c367008" (UID: "81c9e0ff-d205-4fa2-9606-5ba89c367008"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:07:59 crc kubenswrapper[4651]: I1011 05:07:59.215388 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81c9e0ff-d205-4fa2-9606-5ba89c367008-kube-api-access-z4kg8" (OuterVolumeSpecName: "kube-api-access-z4kg8") pod "81c9e0ff-d205-4fa2-9606-5ba89c367008" (UID: "81c9e0ff-d205-4fa2-9606-5ba89c367008"). InnerVolumeSpecName "kube-api-access-z4kg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:07:59 crc kubenswrapper[4651]: I1011 05:07:59.216403 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81c9e0ff-d205-4fa2-9606-5ba89c367008-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "81c9e0ff-d205-4fa2-9606-5ba89c367008" (UID: "81c9e0ff-d205-4fa2-9606-5ba89c367008"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:07:59 crc kubenswrapper[4651]: I1011 05:07:59.218160 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81c9e0ff-d205-4fa2-9606-5ba89c367008-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "81c9e0ff-d205-4fa2-9606-5ba89c367008" (UID: "81c9e0ff-d205-4fa2-9606-5ba89c367008"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:07:59 crc kubenswrapper[4651]: I1011 05:07:59.237359 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81c9e0ff-d205-4fa2-9606-5ba89c367008-config-data" (OuterVolumeSpecName: "config-data") pod "81c9e0ff-d205-4fa2-9606-5ba89c367008" (UID: "81c9e0ff-d205-4fa2-9606-5ba89c367008"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:07:59 crc kubenswrapper[4651]: I1011 05:07:59.237742 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81c9e0ff-d205-4fa2-9606-5ba89c367008-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "81c9e0ff-d205-4fa2-9606-5ba89c367008" (UID: "81c9e0ff-d205-4fa2-9606-5ba89c367008"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:07:59 crc kubenswrapper[4651]: I1011 05:07:59.311366 4651 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/81c9e0ff-d205-4fa2-9606-5ba89c367008-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:59 crc kubenswrapper[4651]: I1011 05:07:59.311418 4651 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81c9e0ff-d205-4fa2-9606-5ba89c367008-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:59 crc kubenswrapper[4651]: I1011 05:07:59.311427 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z4kg8\" (UniqueName: \"kubernetes.io/projected/81c9e0ff-d205-4fa2-9606-5ba89c367008-kube-api-access-z4kg8\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:59 crc kubenswrapper[4651]: I1011 05:07:59.311439 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81c9e0ff-d205-4fa2-9606-5ba89c367008-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:59 crc kubenswrapper[4651]: I1011 05:07:59.311447 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81c9e0ff-d205-4fa2-9606-5ba89c367008-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:59 crc kubenswrapper[4651]: I1011 05:07:59.311454 4651 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/81c9e0ff-d205-4fa2-9606-5ba89c367008-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 11 05:07:59 crc kubenswrapper[4651]: I1011 05:07:59.720915 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-kb7c9" event={"ID":"81c9e0ff-d205-4fa2-9606-5ba89c367008","Type":"ContainerDied","Data":"14e8fe56227b1d8e90e55bf72454b7340832262f6b83b37e348ad57caa633447"} Oct 11 05:07:59 crc kubenswrapper[4651]: I1011 05:07:59.720963 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="14e8fe56227b1d8e90e55bf72454b7340832262f6b83b37e348ad57caa633447" Oct 11 05:07:59 crc kubenswrapper[4651]: I1011 05:07:59.721212 4651 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-kb7c9" Oct 11 05:07:59 crc kubenswrapper[4651]: E1011 05:07:59.731597 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-chq8s" podUID="4f425c3a-376b-4ba1-8066-96b2d1f21698" Oct 11 05:08:00 crc kubenswrapper[4651]: I1011 05:08:00.179245 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-kb7c9"] Oct 11 05:08:00 crc kubenswrapper[4651]: I1011 05:08:00.186259 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-kb7c9"] Oct 11 05:08:00 crc kubenswrapper[4651]: I1011 05:08:00.268684 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-9rf4f"] Oct 11 05:08:00 crc kubenswrapper[4651]: E1011 05:08:00.269303 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81c9e0ff-d205-4fa2-9606-5ba89c367008" containerName="keystone-bootstrap" Oct 11 05:08:00 crc kubenswrapper[4651]: I1011 05:08:00.269341 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="81c9e0ff-d205-4fa2-9606-5ba89c367008" containerName="keystone-bootstrap" Oct 11 05:08:00 crc kubenswrapper[4651]: I1011 05:08:00.269682 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="81c9e0ff-d205-4fa2-9606-5ba89c367008" containerName="keystone-bootstrap" Oct 11 05:08:00 crc kubenswrapper[4651]: I1011 05:08:00.270920 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-9rf4f" Oct 11 05:08:00 crc kubenswrapper[4651]: I1011 05:08:00.273345 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 11 05:08:00 crc kubenswrapper[4651]: I1011 05:08:00.273529 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 11 05:08:00 crc kubenswrapper[4651]: I1011 05:08:00.278535 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-2zq9v" Oct 11 05:08:00 crc kubenswrapper[4651]: I1011 05:08:00.279001 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 11 05:08:00 crc kubenswrapper[4651]: I1011 05:08:00.284227 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-9rf4f"] Oct 11 05:08:00 crc kubenswrapper[4651]: I1011 05:08:00.431888 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/261ffa67-4305-4260-903d-93b8af576721-scripts\") pod \"keystone-bootstrap-9rf4f\" (UID: \"261ffa67-4305-4260-903d-93b8af576721\") " pod="openstack/keystone-bootstrap-9rf4f" Oct 11 05:08:00 crc kubenswrapper[4651]: I1011 05:08:00.432240 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/261ffa67-4305-4260-903d-93b8af576721-config-data\") pod \"keystone-bootstrap-9rf4f\" (UID: \"261ffa67-4305-4260-903d-93b8af576721\") " pod="openstack/keystone-bootstrap-9rf4f" Oct 11 05:08:00 crc kubenswrapper[4651]: I1011 05:08:00.432394 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" 
(UniqueName: \"kubernetes.io/secret/261ffa67-4305-4260-903d-93b8af576721-credential-keys\") pod \"keystone-bootstrap-9rf4f\" (UID: \"261ffa67-4305-4260-903d-93b8af576721\") " pod="openstack/keystone-bootstrap-9rf4f" Oct 11 05:08:00 crc kubenswrapper[4651]: I1011 05:08:00.432570 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6s8f\" (UniqueName: \"kubernetes.io/projected/261ffa67-4305-4260-903d-93b8af576721-kube-api-access-r6s8f\") pod \"keystone-bootstrap-9rf4f\" (UID: \"261ffa67-4305-4260-903d-93b8af576721\") " pod="openstack/keystone-bootstrap-9rf4f" Oct 11 05:08:00 crc kubenswrapper[4651]: I1011 05:08:00.432777 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/261ffa67-4305-4260-903d-93b8af576721-combined-ca-bundle\") pod \"keystone-bootstrap-9rf4f\" (UID: \"261ffa67-4305-4260-903d-93b8af576721\") " pod="openstack/keystone-bootstrap-9rf4f" Oct 11 05:08:00 crc kubenswrapper[4651]: I1011 05:08:00.432806 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/261ffa67-4305-4260-903d-93b8af576721-fernet-keys\") pod \"keystone-bootstrap-9rf4f\" (UID: \"261ffa67-4305-4260-903d-93b8af576721\") " pod="openstack/keystone-bootstrap-9rf4f" Oct 11 05:08:00 crc kubenswrapper[4651]: I1011 05:08:00.534915 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6s8f\" (UniqueName: \"kubernetes.io/projected/261ffa67-4305-4260-903d-93b8af576721-kube-api-access-r6s8f\") pod \"keystone-bootstrap-9rf4f\" (UID: \"261ffa67-4305-4260-903d-93b8af576721\") " pod="openstack/keystone-bootstrap-9rf4f" Oct 11 05:08:00 crc kubenswrapper[4651]: I1011 05:08:00.535025 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/261ffa67-4305-4260-903d-93b8af576721-combined-ca-bundle\") pod \"keystone-bootstrap-9rf4f\" (UID: \"261ffa67-4305-4260-903d-93b8af576721\") " pod="openstack/keystone-bootstrap-9rf4f" Oct 11 05:08:00 crc kubenswrapper[4651]: I1011 05:08:00.535047 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/261ffa67-4305-4260-903d-93b8af576721-fernet-keys\") pod \"keystone-bootstrap-9rf4f\" (UID: \"261ffa67-4305-4260-903d-93b8af576721\") " pod="openstack/keystone-bootstrap-9rf4f" Oct 11 05:08:00 crc kubenswrapper[4651]: I1011 05:08:00.535100 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/261ffa67-4305-4260-903d-93b8af576721-scripts\") pod \"keystone-bootstrap-9rf4f\" (UID: \"261ffa67-4305-4260-903d-93b8af576721\") " pod="openstack/keystone-bootstrap-9rf4f" Oct 11 05:08:00 crc kubenswrapper[4651]: I1011 05:08:00.535120 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/261ffa67-4305-4260-903d-93b8af576721-config-data\") pod \"keystone-bootstrap-9rf4f\" (UID: \"261ffa67-4305-4260-903d-93b8af576721\") " pod="openstack/keystone-bootstrap-9rf4f" Oct 11 05:08:00 crc kubenswrapper[4651]: I1011 05:08:00.535152 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: 
\"kubernetes.io/secret/261ffa67-4305-4260-903d-93b8af576721-credential-keys\") pod \"keystone-bootstrap-9rf4f\" (UID: \"261ffa67-4305-4260-903d-93b8af576721\") " pod="openstack/keystone-bootstrap-9rf4f" Oct 11 05:08:00 crc kubenswrapper[4651]: I1011 05:08:00.540702 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/261ffa67-4305-4260-903d-93b8af576721-combined-ca-bundle\") pod \"keystone-bootstrap-9rf4f\" (UID: \"261ffa67-4305-4260-903d-93b8af576721\") " pod="openstack/keystone-bootstrap-9rf4f" Oct 11 05:08:00 crc kubenswrapper[4651]: I1011 05:08:00.541057 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/261ffa67-4305-4260-903d-93b8af576721-credential-keys\") pod \"keystone-bootstrap-9rf4f\" (UID: \"261ffa67-4305-4260-903d-93b8af576721\") " pod="openstack/keystone-bootstrap-9rf4f" Oct 11 05:08:00 crc kubenswrapper[4651]: I1011 05:08:00.541696 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/261ffa67-4305-4260-903d-93b8af576721-fernet-keys\") pod \"keystone-bootstrap-9rf4f\" (UID: \"261ffa67-4305-4260-903d-93b8af576721\") " pod="openstack/keystone-bootstrap-9rf4f" Oct 11 05:08:00 crc kubenswrapper[4651]: I1011 05:08:00.541696 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/261ffa67-4305-4260-903d-93b8af576721-config-data\") pod \"keystone-bootstrap-9rf4f\" (UID: \"261ffa67-4305-4260-903d-93b8af576721\") " pod="openstack/keystone-bootstrap-9rf4f" Oct 11 05:08:00 crc kubenswrapper[4651]: I1011 05:08:00.550369 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/261ffa67-4305-4260-903d-93b8af576721-scripts\") pod \"keystone-bootstrap-9rf4f\" (UID: \"261ffa67-4305-4260-903d-93b8af576721\") " pod="openstack/keystone-bootstrap-9rf4f" Oct 11 05:08:00 crc kubenswrapper[4651]: I1011 05:08:00.551429 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6s8f\" (UniqueName: \"kubernetes.io/projected/261ffa67-4305-4260-903d-93b8af576721-kube-api-access-r6s8f\") pod \"keystone-bootstrap-9rf4f\" (UID: \"261ffa67-4305-4260-903d-93b8af576721\") " pod="openstack/keystone-bootstrap-9rf4f" Oct 11 05:08:00 crc kubenswrapper[4651]: I1011 05:08:00.602402 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-9rf4f" Oct 11 05:08:01 crc kubenswrapper[4651]: I1011 05:08:01.879224 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81c9e0ff-d205-4fa2-9606-5ba89c367008" path="/var/lib/kubelet/pods/81c9e0ff-d205-4fa2-9606-5ba89c367008/volumes" Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.619857 4651 scope.go:117] "RemoveContainer" containerID="dd08989de1f1be8d0b72125ccc1ec1502812a5ebad8ea6145fc6cf2c3a55f7b8" Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.732844 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk" Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.737412 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-69899bbb49-h5pnl" Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.743259 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-64c56644c5-nxbb9"
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.790108 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-69899bbb49-h5pnl" event={"ID":"fda7363a-5096-4fd1-9549-dcd2e5c4f70b","Type":"ContainerDied","Data":"4b53827ad2a9f8ae815036c1f2636fb74161fdd0b109c2bec44ed8bb88b29c08"}
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.790167 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-69899bbb49-h5pnl"
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.791877 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-64c56644c5-nxbb9"
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.791882 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-64c56644c5-nxbb9" event={"ID":"786ef132-9cae-4be8-9ffa-7dc9bcd86a5a","Type":"ContainerDied","Data":"bfe9e49291f3638cd5c724fc86fa068855c5296d243b3727a829624966544861"}
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.795791 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk" event={"ID":"4fb44cbf-367e-4d0e-95f6-7411e5b76817","Type":"ContainerDied","Data":"a38242094dae6c2b83d4a81f9f113ade0605c607ba64045d8aa3e7b6bf37964f"}
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.795885 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk"
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.797939 4651 generic.go:334] "Generic (PLEG): container finished" podID="b35ecebc-5355-4ad7-bf37-0d288eed4fdc" containerID="7e749e6c5e39c8b229cde18437b792d4641bdca4e4c93ef3ba37af438e266041" exitCode=0
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.797964 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-zvfkb" event={"ID":"b35ecebc-5355-4ad7-bf37-0d288eed4fdc","Type":"ContainerDied","Data":"7e749e6c5e39c8b229cde18437b792d4641bdca4e4c93ef3ba37af438e266041"}
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.831488 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4fb44cbf-367e-4d0e-95f6-7411e5b76817-config\") pod \"4fb44cbf-367e-4d0e-95f6-7411e5b76817\" (UID: \"4fb44cbf-367e-4d0e-95f6-7411e5b76817\") "
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.831559 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/786ef132-9cae-4be8-9ffa-7dc9bcd86a5a-config-data\") pod \"786ef132-9cae-4be8-9ffa-7dc9bcd86a5a\" (UID: \"786ef132-9cae-4be8-9ffa-7dc9bcd86a5a\") "
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.831584 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4fb44cbf-367e-4d0e-95f6-7411e5b76817-ovsdbserver-sb\") pod \"4fb44cbf-367e-4d0e-95f6-7411e5b76817\" (UID: \"4fb44cbf-367e-4d0e-95f6-7411e5b76817\") "
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.831619 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fda7363a-5096-4fd1-9549-dcd2e5c4f70b-logs\") pod \"fda7363a-5096-4fd1-9549-dcd2e5c4f70b\" (UID: \"fda7363a-5096-4fd1-9549-dcd2e5c4f70b\") "
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.831641 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4fb44cbf-367e-4d0e-95f6-7411e5b76817-ovsdbserver-nb\") pod \"4fb44cbf-367e-4d0e-95f6-7411e5b76817\" (UID: \"4fb44cbf-367e-4d0e-95f6-7411e5b76817\") "
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.831666 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fda7363a-5096-4fd1-9549-dcd2e5c4f70b-horizon-secret-key\") pod \"fda7363a-5096-4fd1-9549-dcd2e5c4f70b\" (UID: \"fda7363a-5096-4fd1-9549-dcd2e5c4f70b\") "
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.831689 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cq6qg\" (UniqueName: \"kubernetes.io/projected/fda7363a-5096-4fd1-9549-dcd2e5c4f70b-kube-api-access-cq6qg\") pod \"fda7363a-5096-4fd1-9549-dcd2e5c4f70b\" (UID: \"fda7363a-5096-4fd1-9549-dcd2e5c4f70b\") "
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.831723 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fda7363a-5096-4fd1-9549-dcd2e5c4f70b-scripts\") pod \"fda7363a-5096-4fd1-9549-dcd2e5c4f70b\" (UID: \"fda7363a-5096-4fd1-9549-dcd2e5c4f70b\") "
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.831750 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/786ef132-9cae-4be8-9ffa-7dc9bcd86a5a-logs\") pod \"786ef132-9cae-4be8-9ffa-7dc9bcd86a5a\" (UID: \"786ef132-9cae-4be8-9ffa-7dc9bcd86a5a\") "
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.831781 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c4szm\" (UniqueName: \"kubernetes.io/projected/786ef132-9cae-4be8-9ffa-7dc9bcd86a5a-kube-api-access-c4szm\") pod \"786ef132-9cae-4be8-9ffa-7dc9bcd86a5a\" (UID: \"786ef132-9cae-4be8-9ffa-7dc9bcd86a5a\") "
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.831835 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fda7363a-5096-4fd1-9549-dcd2e5c4f70b-config-data\") pod \"fda7363a-5096-4fd1-9549-dcd2e5c4f70b\" (UID: \"fda7363a-5096-4fd1-9549-dcd2e5c4f70b\") "
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.831878 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rmwnd\" (UniqueName: \"kubernetes.io/projected/4fb44cbf-367e-4d0e-95f6-7411e5b76817-kube-api-access-rmwnd\") pod \"4fb44cbf-367e-4d0e-95f6-7411e5b76817\" (UID: \"4fb44cbf-367e-4d0e-95f6-7411e5b76817\") "
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.831936 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/786ef132-9cae-4be8-9ffa-7dc9bcd86a5a-horizon-secret-key\") pod \"786ef132-9cae-4be8-9ffa-7dc9bcd86a5a\" (UID: \"786ef132-9cae-4be8-9ffa-7dc9bcd86a5a\") "
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.831977 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4fb44cbf-367e-4d0e-95f6-7411e5b76817-dns-swift-storage-0\") pod \"4fb44cbf-367e-4d0e-95f6-7411e5b76817\" (UID: \"4fb44cbf-367e-4d0e-95f6-7411e5b76817\") "
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.832001 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/786ef132-9cae-4be8-9ffa-7dc9bcd86a5a-scripts\") pod \"786ef132-9cae-4be8-9ffa-7dc9bcd86a5a\" (UID: \"786ef132-9cae-4be8-9ffa-7dc9bcd86a5a\") "
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.832020 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4fb44cbf-367e-4d0e-95f6-7411e5b76817-dns-svc\") pod \"4fb44cbf-367e-4d0e-95f6-7411e5b76817\" (UID: \"4fb44cbf-367e-4d0e-95f6-7411e5b76817\") "
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.832057 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fda7363a-5096-4fd1-9549-dcd2e5c4f70b-logs" (OuterVolumeSpecName: "logs") pod "fda7363a-5096-4fd1-9549-dcd2e5c4f70b" (UID: "fda7363a-5096-4fd1-9549-dcd2e5c4f70b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.832475 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda7363a-5096-4fd1-9549-dcd2e5c4f70b-scripts" (OuterVolumeSpecName: "scripts") pod "fda7363a-5096-4fd1-9549-dcd2e5c4f70b" (UID: "fda7363a-5096-4fd1-9549-dcd2e5c4f70b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.832597 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/786ef132-9cae-4be8-9ffa-7dc9bcd86a5a-config-data" (OuterVolumeSpecName: "config-data") pod "786ef132-9cae-4be8-9ffa-7dc9bcd86a5a" (UID: "786ef132-9cae-4be8-9ffa-7dc9bcd86a5a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.833009 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda7363a-5096-4fd1-9549-dcd2e5c4f70b-config-data" (OuterVolumeSpecName: "config-data") pod "fda7363a-5096-4fd1-9549-dcd2e5c4f70b" (UID: "fda7363a-5096-4fd1-9549-dcd2e5c4f70b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.833060 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/786ef132-9cae-4be8-9ffa-7dc9bcd86a5a-logs" (OuterVolumeSpecName: "logs") pod "786ef132-9cae-4be8-9ffa-7dc9bcd86a5a" (UID: "786ef132-9cae-4be8-9ffa-7dc9bcd86a5a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.833540 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/786ef132-9cae-4be8-9ffa-7dc9bcd86a5a-scripts" (OuterVolumeSpecName: "scripts") pod "786ef132-9cae-4be8-9ffa-7dc9bcd86a5a" (UID: "786ef132-9cae-4be8-9ffa-7dc9bcd86a5a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.836567 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/786ef132-9cae-4be8-9ffa-7dc9bcd86a5a-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "786ef132-9cae-4be8-9ffa-7dc9bcd86a5a" (UID: "786ef132-9cae-4be8-9ffa-7dc9bcd86a5a"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.844812 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda7363a-5096-4fd1-9549-dcd2e5c4f70b-kube-api-access-cq6qg" (OuterVolumeSpecName: "kube-api-access-cq6qg") pod "fda7363a-5096-4fd1-9549-dcd2e5c4f70b" (UID: "fda7363a-5096-4fd1-9549-dcd2e5c4f70b"). InnerVolumeSpecName "kube-api-access-cq6qg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.845500 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fb44cbf-367e-4d0e-95f6-7411e5b76817-kube-api-access-rmwnd" (OuterVolumeSpecName: "kube-api-access-rmwnd") pod "4fb44cbf-367e-4d0e-95f6-7411e5b76817" (UID: "4fb44cbf-367e-4d0e-95f6-7411e5b76817"). InnerVolumeSpecName "kube-api-access-rmwnd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.845946 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/786ef132-9cae-4be8-9ffa-7dc9bcd86a5a-kube-api-access-c4szm" (OuterVolumeSpecName: "kube-api-access-c4szm") pod "786ef132-9cae-4be8-9ffa-7dc9bcd86a5a" (UID: "786ef132-9cae-4be8-9ffa-7dc9bcd86a5a"). InnerVolumeSpecName "kube-api-access-c4szm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.847933 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda7363a-5096-4fd1-9549-dcd2e5c4f70b-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "fda7363a-5096-4fd1-9549-dcd2e5c4f70b" (UID: "fda7363a-5096-4fd1-9549-dcd2e5c4f70b"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.873908 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4fb44cbf-367e-4d0e-95f6-7411e5b76817-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "4fb44cbf-367e-4d0e-95f6-7411e5b76817" (UID: "4fb44cbf-367e-4d0e-95f6-7411e5b76817"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.890156 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4fb44cbf-367e-4d0e-95f6-7411e5b76817-config" (OuterVolumeSpecName: "config") pod "4fb44cbf-367e-4d0e-95f6-7411e5b76817" (UID: "4fb44cbf-367e-4d0e-95f6-7411e5b76817"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.890534 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4fb44cbf-367e-4d0e-95f6-7411e5b76817-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4fb44cbf-367e-4d0e-95f6-7411e5b76817" (UID: "4fb44cbf-367e-4d0e-95f6-7411e5b76817"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.900052 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4fb44cbf-367e-4d0e-95f6-7411e5b76817-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "4fb44cbf-367e-4d0e-95f6-7411e5b76817" (UID: "4fb44cbf-367e-4d0e-95f6-7411e5b76817"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.902108 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4fb44cbf-367e-4d0e-95f6-7411e5b76817-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "4fb44cbf-367e-4d0e-95f6-7411e5b76817" (UID: "4fb44cbf-367e-4d0e-95f6-7411e5b76817"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.934074 4651 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fda7363a-5096-4fd1-9549-dcd2e5c4f70b-logs\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.934106 4651 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4fb44cbf-367e-4d0e-95f6-7411e5b76817-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.934117 4651 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/fda7363a-5096-4fd1-9549-dcd2e5c4f70b-horizon-secret-key\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.934126 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cq6qg\" (UniqueName: \"kubernetes.io/projected/fda7363a-5096-4fd1-9549-dcd2e5c4f70b-kube-api-access-cq6qg\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.934136 4651 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fda7363a-5096-4fd1-9549-dcd2e5c4f70b-scripts\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.934144 4651 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/786ef132-9cae-4be8-9ffa-7dc9bcd86a5a-logs\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.934152 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c4szm\" (UniqueName: \"kubernetes.io/projected/786ef132-9cae-4be8-9ffa-7dc9bcd86a5a-kube-api-access-c4szm\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.934159 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fda7363a-5096-4fd1-9549-dcd2e5c4f70b-config-data\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.934168 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rmwnd\" (UniqueName: \"kubernetes.io/projected/4fb44cbf-367e-4d0e-95f6-7411e5b76817-kube-api-access-rmwnd\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.934177 4651 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/786ef132-9cae-4be8-9ffa-7dc9bcd86a5a-horizon-secret-key\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.934200 4651 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4fb44cbf-367e-4d0e-95f6-7411e5b76817-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.934209 4651 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/786ef132-9cae-4be8-9ffa-7dc9bcd86a5a-scripts\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.934217 4651 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4fb44cbf-367e-4d0e-95f6-7411e5b76817-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.934226 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4fb44cbf-367e-4d0e-95f6-7411e5b76817-config\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.934236 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/786ef132-9cae-4be8-9ffa-7dc9bcd86a5a-config-data\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:05 crc kubenswrapper[4651]: I1011 05:08:05.934245 4651 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4fb44cbf-367e-4d0e-95f6-7411e5b76817-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:06 crc kubenswrapper[4651]: I1011 05:08:06.138293 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-64c56644c5-nxbb9"]
Oct 11 05:08:06 crc kubenswrapper[4651]: I1011 05:08:06.153606 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-64c56644c5-nxbb9"]
Oct 11 05:08:06 crc kubenswrapper[4651]: I1011 05:08:06.167974 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-69899bbb49-h5pnl"]
Oct 11 05:08:06 crc kubenswrapper[4651]: I1011 05:08:06.175318 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-69899bbb49-h5pnl"]
Oct 11 05:08:06 crc kubenswrapper[4651]: I1011 05:08:06.189787 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-s97wk"]
Oct 11 05:08:06 crc kubenswrapper[4651]: I1011 05:08:06.198044 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-s97wk"]
Oct 11 05:08:06 crc kubenswrapper[4651]: E1011 05:08:06.841932 4651 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified"
Oct 11 05:08:06 crc kubenswrapper[4651]: E1011 05:08:06.842117 4651 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nbjpx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-8rxgw_openstack(36b45d75-4e52-49b7-b7d7-13d53d2f7076): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 11 05:08:06 crc kubenswrapper[4651]: E1011 05:08:06.843272 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-8rxgw" podUID="36b45d75-4e52-49b7-b7d7-13d53d2f7076"
Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.172304 4651 scope.go:117] "RemoveContainer" containerID="980fd46f041c4e96e3e86fb3bde80c77b81261d420535c38d63a196a86b88e5e"
Oct 11 05:08:07 crc kubenswrapper[4651]: E1011 05:08:07.173967 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"980fd46f041c4e96e3e86fb3bde80c77b81261d420535c38d63a196a86b88e5e\": container with ID starting with 980fd46f041c4e96e3e86fb3bde80c77b81261d420535c38d63a196a86b88e5e not found: ID does not exist" containerID="980fd46f041c4e96e3e86fb3bde80c77b81261d420535c38d63a196a86b88e5e"
Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.174005 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"980fd46f041c4e96e3e86fb3bde80c77b81261d420535c38d63a196a86b88e5e"} err="failed to get container status \"980fd46f041c4e96e3e86fb3bde80c77b81261d420535c38d63a196a86b88e5e\": rpc error: code = NotFound desc = could not find container \"980fd46f041c4e96e3e86fb3bde80c77b81261d420535c38d63a196a86b88e5e\": container with ID starting with 980fd46f041c4e96e3e86fb3bde80c77b81261d420535c38d63a196a86b88e5e not found: ID does not exist"
Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.174030 4651 scope.go:117] "RemoveContainer" containerID="dd08989de1f1be8d0b72125ccc1ec1502812a5ebad8ea6145fc6cf2c3a55f7b8"
Oct 11 05:08:07 crc kubenswrapper[4651]: E1011 05:08:07.176344 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd08989de1f1be8d0b72125ccc1ec1502812a5ebad8ea6145fc6cf2c3a55f7b8\": container with ID starting with dd08989de1f1be8d0b72125ccc1ec1502812a5ebad8ea6145fc6cf2c3a55f7b8 not found: ID does not exist" containerID="dd08989de1f1be8d0b72125ccc1ec1502812a5ebad8ea6145fc6cf2c3a55f7b8"
Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.176371 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd08989de1f1be8d0b72125ccc1ec1502812a5ebad8ea6145fc6cf2c3a55f7b8"} err="failed to get container status \"dd08989de1f1be8d0b72125ccc1ec1502812a5ebad8ea6145fc6cf2c3a55f7b8\": rpc error: code = NotFound desc = could not find container \"dd08989de1f1be8d0b72125ccc1ec1502812a5ebad8ea6145fc6cf2c3a55f7b8\": container with ID starting with dd08989de1f1be8d0b72125ccc1ec1502812a5ebad8ea6145fc6cf2c3a55f7b8 not found: ID does not exist"
Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.176389 4651 scope.go:117] "RemoveContainer" containerID="980fd46f041c4e96e3e86fb3bde80c77b81261d420535c38d63a196a86b88e5e"
Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.176963 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"980fd46f041c4e96e3e86fb3bde80c77b81261d420535c38d63a196a86b88e5e"} err="failed to get container status \"980fd46f041c4e96e3e86fb3bde80c77b81261d420535c38d63a196a86b88e5e\": rpc error: code = NotFound desc = could not find container \"980fd46f041c4e96e3e86fb3bde80c77b81261d420535c38d63a196a86b88e5e\": container with ID starting with 980fd46f041c4e96e3e86fb3bde80c77b81261d420535c38d63a196a86b88e5e not found: ID does not exist"
Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.177007 4651 scope.go:117] "RemoveContainer" containerID="dd08989de1f1be8d0b72125ccc1ec1502812a5ebad8ea6145fc6cf2c3a55f7b8"
Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.177311 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd08989de1f1be8d0b72125ccc1ec1502812a5ebad8ea6145fc6cf2c3a55f7b8"} err="failed to get container status \"dd08989de1f1be8d0b72125ccc1ec1502812a5ebad8ea6145fc6cf2c3a55f7b8\": rpc error: code = NotFound desc = could not find container \"dd08989de1f1be8d0b72125ccc1ec1502812a5ebad8ea6145fc6cf2c3a55f7b8\": container with ID starting with dd08989de1f1be8d0b72125ccc1ec1502812a5ebad8ea6145fc6cf2c3a55f7b8 not found: ID does not exist"
Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.177336 4651 scope.go:117] "RemoveContainer" containerID="7704156b0cbfa5e6bfb0dc4ce81d0ef1948d4485444be7b2fc4b5a3768746120"
Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.301676 4651 scope.go:117] "RemoveContainer" containerID="4365429e5a86c310c6592e2cebbac5f174487c546f08fb667e2532d2c07b6357"
Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.392335 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-zvfkb"
Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.473191 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xqqzp\" (UniqueName: \"kubernetes.io/projected/b35ecebc-5355-4ad7-bf37-0d288eed4fdc-kube-api-access-xqqzp\") pod \"b35ecebc-5355-4ad7-bf37-0d288eed4fdc\" (UID: \"b35ecebc-5355-4ad7-bf37-0d288eed4fdc\") "
Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.473240 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b35ecebc-5355-4ad7-bf37-0d288eed4fdc-combined-ca-bundle\") pod \"b35ecebc-5355-4ad7-bf37-0d288eed4fdc\" (UID: \"b35ecebc-5355-4ad7-bf37-0d288eed4fdc\") "
Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.473320 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b35ecebc-5355-4ad7-bf37-0d288eed4fdc-config\") pod \"b35ecebc-5355-4ad7-bf37-0d288eed4fdc\" (UID: \"b35ecebc-5355-4ad7-bf37-0d288eed4fdc\") "
Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.479888 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b35ecebc-5355-4ad7-bf37-0d288eed4fdc-kube-api-access-xqqzp" (OuterVolumeSpecName: "kube-api-access-xqqzp") pod "b35ecebc-5355-4ad7-bf37-0d288eed4fdc" (UID: "b35ecebc-5355-4ad7-bf37-0d288eed4fdc"). InnerVolumeSpecName "kube-api-access-xqqzp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.489218 4651 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6d5b6d6b67-s97wk" podUID="4fb44cbf-367e-4d0e-95f6-7411e5b76817" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.126:5353: i/o timeout"
Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.509233 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b35ecebc-5355-4ad7-bf37-0d288eed4fdc-config" (OuterVolumeSpecName: "config") pod "b35ecebc-5355-4ad7-bf37-0d288eed4fdc" (UID: "b35ecebc-5355-4ad7-bf37-0d288eed4fdc"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.514955 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b35ecebc-5355-4ad7-bf37-0d288eed4fdc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b35ecebc-5355-4ad7-bf37-0d288eed4fdc" (UID: "b35ecebc-5355-4ad7-bf37-0d288eed4fdc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.541426 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5f7d84485b-zb5s7"] Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.548903 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-68976f6bc6-9jl66"] Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.574938 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xqqzp\" (UniqueName: \"kubernetes.io/projected/b35ecebc-5355-4ad7-bf37-0d288eed4fdc-kube-api-access-xqqzp\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.574969 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b35ecebc-5355-4ad7-bf37-0d288eed4fdc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.574979 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/b35ecebc-5355-4ad7-bf37-0d288eed4fdc-config\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.688489 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.758884 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-9rf4f"] Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.819592 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-zvfkb" event={"ID":"b35ecebc-5355-4ad7-bf37-0d288eed4fdc","Type":"ContainerDied","Data":"c462253d5364923f564211a3559ee702c4cd925584eb7a81b2dc0b09efb1849b"} Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.819988 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c462253d5364923f564211a3559ee702c4cd925584eb7a81b2dc0b09efb1849b" Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.820071 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-zvfkb" Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.821278 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe","Type":"ContainerStarted","Data":"a0f5491efa2fa93221aa88d4f90d9064e78a2f60e0b3eb331b8106d259391f2d"} Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.822310 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5f7d84485b-zb5s7" event={"ID":"a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7","Type":"ContainerStarted","Data":"3a396a234b7bf73d2bce090c84c6b5382b0574cd42d35cca06c1a3d4c800933b"} Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.822385 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5f7d84485b-zb5s7" event={"ID":"a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7","Type":"ContainerStarted","Data":"eb8faf9395b697296036fd1188ffa3082d5ceedda448a3430e0896ecf17ab435"} Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.823863 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"29d2e57f-1259-4f23-9e62-ce5d86ace5a9","Type":"ContainerStarted","Data":"85ac2eecb7b62aa74139642ce4a97e46169099193040e7d16a8b4dfda52ef21e"} Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.825364 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-75955bb549-s67lw" event={"ID":"74e74db5-efd0-4198-82a1-ef76d751d1de","Type":"ContainerStarted","Data":"a264ec5f5afb5d6db4164d085243e37cc4fcbe01b71b5337d59dab6d14834176"} Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.825443 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-75955bb549-s67lw" event={"ID":"74e74db5-efd0-4198-82a1-ef76d751d1de","Type":"ContainerStarted","Data":"5d83667756f591ba7d0455576d7fbf6bc31819811cfe61dbada388b269524281"} Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.825593 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-75955bb549-s67lw" podUID="74e74db5-efd0-4198-82a1-ef76d751d1de" containerName="horizon-log" containerID="cri-o://5d83667756f591ba7d0455576d7fbf6bc31819811cfe61dbada388b269524281" gracePeriod=30 Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.825858 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-75955bb549-s67lw" podUID="74e74db5-efd0-4198-82a1-ef76d751d1de" containerName="horizon" containerID="cri-o://a264ec5f5afb5d6db4164d085243e37cc4fcbe01b71b5337d59dab6d14834176" gracePeriod=30 Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.835462 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-68976f6bc6-9jl66" event={"ID":"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33","Type":"ContainerStarted","Data":"321c90e912a2b24f9c5662f28a92c8cc11f803ff3e2b862071cbf01111dbe4be"} Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.835497 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-68976f6bc6-9jl66" event={"ID":"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33","Type":"ContainerStarted","Data":"f5372438287e062570d4dc3eff8b3374de214d326c9efd0c419844b37f3d3141"} Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.848852 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"16b0d24a-e647-4381-9f03-9b48c34ba52f","Type":"ContainerStarted","Data":"9694fee13b033094ce30c5aac1dfc12712a8e18e5878f97c2f379916d2059414"} Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.857297 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-9rf4f" event={"ID":"261ffa67-4305-4260-903d-93b8af576721","Type":"ContainerStarted","Data":"e7b03273c6c46ec518ef5af2fb012a499e1985e542ef18ecee3ccb7d8b86cb91"} Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.865518 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-75955bb549-s67lw" podStartSLOduration=2.305857151 podStartE2EDuration="27.865500209s" podCreationTimestamp="2025-10-11 05:07:40 +0000 UTC" firstStartedPulling="2025-10-11 05:07:41.223065119 +0000 UTC m=+982.119297915" lastFinishedPulling="2025-10-11 05:08:06.782708177 +0000 UTC m=+1007.678940973" observedRunningTime="2025-10-11 05:08:07.848609419 +0000 UTC m=+1008.744842235" watchObservedRunningTime="2025-10-11 05:08:07.865500209 +0000 UTC m=+1008.761733005" Oct 11 05:08:07 crc kubenswrapper[4651]: E1011 05:08:07.892280 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-8rxgw" podUID="36b45d75-4e52-49b7-b7d7-13d53d2f7076" Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.982141 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4fb44cbf-367e-4d0e-95f6-7411e5b76817" path="/var/lib/kubelet/pods/4fb44cbf-367e-4d0e-95f6-7411e5b76817/volumes" Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.983163 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="786ef132-9cae-4be8-9ffa-7dc9bcd86a5a" path="/var/lib/kubelet/pods/786ef132-9cae-4be8-9ffa-7dc9bcd86a5a/volumes" Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.983791 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda7363a-5096-4fd1-9549-dcd2e5c4f70b" path="/var/lib/kubelet/pods/fda7363a-5096-4fd1-9549-dcd2e5c4f70b/volumes" Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.991520 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-9pngt"] Oct 11 05:08:07 crc kubenswrapper[4651]: E1011 05:08:07.992076 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fb44cbf-367e-4d0e-95f6-7411e5b76817" containerName="init" Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.992162 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fb44cbf-367e-4d0e-95f6-7411e5b76817" containerName="init" Oct 11 05:08:07 crc kubenswrapper[4651]: E1011 05:08:07.992226 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b35ecebc-5355-4ad7-bf37-0d288eed4fdc" containerName="neutron-db-sync" Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.992288 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="b35ecebc-5355-4ad7-bf37-0d288eed4fdc" containerName="neutron-db-sync" Oct 11 05:08:07 crc kubenswrapper[4651]: E1011 05:08:07.992406 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fb44cbf-367e-4d0e-95f6-7411e5b76817" containerName="dnsmasq-dns" Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.992507 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fb44cbf-367e-4d0e-95f6-7411e5b76817" containerName="dnsmasq-dns" Oct 11 
05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.992982 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fb44cbf-367e-4d0e-95f6-7411e5b76817" containerName="dnsmasq-dns" Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.993112 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="b35ecebc-5355-4ad7-bf37-0d288eed4fdc" containerName="neutron-db-sync" Oct 11 05:08:07 crc kubenswrapper[4651]: I1011 05:08:07.996292 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc5c4795-9pngt" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.013675 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-9pngt"] Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.084844 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-ovsdbserver-sb\") pod \"dnsmasq-dns-5ccc5c4795-9pngt\" (UID: \"65b0df64-2ebb-4c09-813e-1be5beb4e8ed\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9pngt" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.085069 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9ktv\" (UniqueName: \"kubernetes.io/projected/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-kube-api-access-c9ktv\") pod \"dnsmasq-dns-5ccc5c4795-9pngt\" (UID: \"65b0df64-2ebb-4c09-813e-1be5beb4e8ed\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9pngt" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.085202 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-dns-svc\") pod \"dnsmasq-dns-5ccc5c4795-9pngt\" (UID: \"65b0df64-2ebb-4c09-813e-1be5beb4e8ed\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9pngt" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.085297 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-ovsdbserver-nb\") pod \"dnsmasq-dns-5ccc5c4795-9pngt\" (UID: \"65b0df64-2ebb-4c09-813e-1be5beb4e8ed\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9pngt" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.085396 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-dns-swift-storage-0\") pod \"dnsmasq-dns-5ccc5c4795-9pngt\" (UID: \"65b0df64-2ebb-4c09-813e-1be5beb4e8ed\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9pngt" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.085602 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-config\") pod \"dnsmasq-dns-5ccc5c4795-9pngt\" (UID: \"65b0df64-2ebb-4c09-813e-1be5beb4e8ed\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9pngt" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.178502 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-79fc6b7784-n2xpf"] Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.190896 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-79fc6b7784-n2xpf" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.191989 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-config\") pod \"dnsmasq-dns-5ccc5c4795-9pngt\" (UID: \"65b0df64-2ebb-4c09-813e-1be5beb4e8ed\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9pngt" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.192029 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-ovsdbserver-sb\") pod \"dnsmasq-dns-5ccc5c4795-9pngt\" (UID: \"65b0df64-2ebb-4c09-813e-1be5beb4e8ed\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9pngt" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.192053 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9ktv\" (UniqueName: \"kubernetes.io/projected/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-kube-api-access-c9ktv\") pod \"dnsmasq-dns-5ccc5c4795-9pngt\" (UID: \"65b0df64-2ebb-4c09-813e-1be5beb4e8ed\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9pngt" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.192078 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-dns-svc\") pod \"dnsmasq-dns-5ccc5c4795-9pngt\" (UID: \"65b0df64-2ebb-4c09-813e-1be5beb4e8ed\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9pngt" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.192095 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-ovsdbserver-nb\") pod \"dnsmasq-dns-5ccc5c4795-9pngt\" (UID: \"65b0df64-2ebb-4c09-813e-1be5beb4e8ed\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9pngt" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.192117 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-dns-swift-storage-0\") pod \"dnsmasq-dns-5ccc5c4795-9pngt\" (UID: \"65b0df64-2ebb-4c09-813e-1be5beb4e8ed\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9pngt" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.195713 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-dns-swift-storage-0\") pod \"dnsmasq-dns-5ccc5c4795-9pngt\" (UID: \"65b0df64-2ebb-4c09-813e-1be5beb4e8ed\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9pngt" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.196310 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-dns-svc\") pod \"dnsmasq-dns-5ccc5c4795-9pngt\" (UID: \"65b0df64-2ebb-4c09-813e-1be5beb4e8ed\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9pngt" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.196620 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-config\") pod \"dnsmasq-dns-5ccc5c4795-9pngt\" (UID: \"65b0df64-2ebb-4c09-813e-1be5beb4e8ed\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9pngt" Oct 11 05:08:08 crc 
kubenswrapper[4651]: I1011 05:08:08.197185 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-ovsdbserver-sb\") pod \"dnsmasq-dns-5ccc5c4795-9pngt\" (UID: \"65b0df64-2ebb-4c09-813e-1be5beb4e8ed\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9pngt" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.205088 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-ovsdbserver-nb\") pod \"dnsmasq-dns-5ccc5c4795-9pngt\" (UID: \"65b0df64-2ebb-4c09-813e-1be5beb4e8ed\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9pngt" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.205761 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.205966 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.206041 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-kqtgr" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.206060 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.213756 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-79fc6b7784-n2xpf"] Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.227793 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9ktv\" (UniqueName: \"kubernetes.io/projected/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-kube-api-access-c9ktv\") pod \"dnsmasq-dns-5ccc5c4795-9pngt\" (UID: \"65b0df64-2ebb-4c09-813e-1be5beb4e8ed\") " pod="openstack/dnsmasq-dns-5ccc5c4795-9pngt" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.293226 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/84763f01-3ff4-49ae-a364-e54b62308ff0-config\") pod \"neutron-79fc6b7784-n2xpf\" (UID: \"84763f01-3ff4-49ae-a364-e54b62308ff0\") " pod="openstack/neutron-79fc6b7784-n2xpf" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.293348 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/84763f01-3ff4-49ae-a364-e54b62308ff0-httpd-config\") pod \"neutron-79fc6b7784-n2xpf\" (UID: \"84763f01-3ff4-49ae-a364-e54b62308ff0\") " pod="openstack/neutron-79fc6b7784-n2xpf" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.293394 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/84763f01-3ff4-49ae-a364-e54b62308ff0-ovndb-tls-certs\") pod \"neutron-79fc6b7784-n2xpf\" (UID: \"84763f01-3ff4-49ae-a364-e54b62308ff0\") " pod="openstack/neutron-79fc6b7784-n2xpf" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.293411 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84763f01-3ff4-49ae-a364-e54b62308ff0-combined-ca-bundle\") pod \"neutron-79fc6b7784-n2xpf\" (UID: \"84763f01-3ff4-49ae-a364-e54b62308ff0\") " pod="openstack/neutron-79fc6b7784-n2xpf" Oct 11 05:08:08 
crc kubenswrapper[4651]: I1011 05:08:08.293439 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75ct4\" (UniqueName: \"kubernetes.io/projected/84763f01-3ff4-49ae-a364-e54b62308ff0-kube-api-access-75ct4\") pod \"neutron-79fc6b7784-n2xpf\" (UID: \"84763f01-3ff4-49ae-a364-e54b62308ff0\") " pod="openstack/neutron-79fc6b7784-n2xpf" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.369252 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc5c4795-9pngt" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.395024 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/84763f01-3ff4-49ae-a364-e54b62308ff0-httpd-config\") pod \"neutron-79fc6b7784-n2xpf\" (UID: \"84763f01-3ff4-49ae-a364-e54b62308ff0\") " pod="openstack/neutron-79fc6b7784-n2xpf" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.395123 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/84763f01-3ff4-49ae-a364-e54b62308ff0-ovndb-tls-certs\") pod \"neutron-79fc6b7784-n2xpf\" (UID: \"84763f01-3ff4-49ae-a364-e54b62308ff0\") " pod="openstack/neutron-79fc6b7784-n2xpf" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.395794 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84763f01-3ff4-49ae-a364-e54b62308ff0-combined-ca-bundle\") pod \"neutron-79fc6b7784-n2xpf\" (UID: \"84763f01-3ff4-49ae-a364-e54b62308ff0\") " pod="openstack/neutron-79fc6b7784-n2xpf" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.395854 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75ct4\" (UniqueName: \"kubernetes.io/projected/84763f01-3ff4-49ae-a364-e54b62308ff0-kube-api-access-75ct4\") pod \"neutron-79fc6b7784-n2xpf\" (UID: \"84763f01-3ff4-49ae-a364-e54b62308ff0\") " pod="openstack/neutron-79fc6b7784-n2xpf" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.395874 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/84763f01-3ff4-49ae-a364-e54b62308ff0-config\") pod \"neutron-79fc6b7784-n2xpf\" (UID: \"84763f01-3ff4-49ae-a364-e54b62308ff0\") " pod="openstack/neutron-79fc6b7784-n2xpf" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.399044 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/84763f01-3ff4-49ae-a364-e54b62308ff0-httpd-config\") pod \"neutron-79fc6b7784-n2xpf\" (UID: \"84763f01-3ff4-49ae-a364-e54b62308ff0\") " pod="openstack/neutron-79fc6b7784-n2xpf" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.399884 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/84763f01-3ff4-49ae-a364-e54b62308ff0-config\") pod \"neutron-79fc6b7784-n2xpf\" (UID: \"84763f01-3ff4-49ae-a364-e54b62308ff0\") " pod="openstack/neutron-79fc6b7784-n2xpf" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.400166 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84763f01-3ff4-49ae-a364-e54b62308ff0-combined-ca-bundle\") pod \"neutron-79fc6b7784-n2xpf\" (UID: \"84763f01-3ff4-49ae-a364-e54b62308ff0\") " 
pod="openstack/neutron-79fc6b7784-n2xpf" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.414568 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/84763f01-3ff4-49ae-a364-e54b62308ff0-ovndb-tls-certs\") pod \"neutron-79fc6b7784-n2xpf\" (UID: \"84763f01-3ff4-49ae-a364-e54b62308ff0\") " pod="openstack/neutron-79fc6b7784-n2xpf" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.417555 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75ct4\" (UniqueName: \"kubernetes.io/projected/84763f01-3ff4-49ae-a364-e54b62308ff0-kube-api-access-75ct4\") pod \"neutron-79fc6b7784-n2xpf\" (UID: \"84763f01-3ff4-49ae-a364-e54b62308ff0\") " pod="openstack/neutron-79fc6b7784-n2xpf" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.583831 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-79fc6b7784-n2xpf" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.878931 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5f7d84485b-zb5s7" event={"ID":"a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7","Type":"ContainerStarted","Data":"33f65508f80f2124af790a9030aa927083cf682fe710e53d62b36e85774540d0"} Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.905871 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"29d2e57f-1259-4f23-9e62-ce5d86ace5a9","Type":"ContainerStarted","Data":"c5b3a53861d8beac080545dd8f9b1a17ddefa4c84e423457e10c1e2c25563913"} Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.906028 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="29d2e57f-1259-4f23-9e62-ce5d86ace5a9" containerName="glance-log" containerID="cri-o://85ac2eecb7b62aa74139642ce4a97e46169099193040e7d16a8b4dfda52ef21e" gracePeriod=30 Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.906449 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="29d2e57f-1259-4f23-9e62-ce5d86ace5a9" containerName="glance-httpd" containerID="cri-o://c5b3a53861d8beac080545dd8f9b1a17ddefa4c84e423457e10c1e2c25563913" gracePeriod=30 Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.910751 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-9rf4f" event={"ID":"261ffa67-4305-4260-903d-93b8af576721","Type":"ContainerStarted","Data":"5fdd10400e07e88a06a5fdb658a6364edb433042dc71c45303e4ee46efa5fd26"} Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.912193 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-5f7d84485b-zb5s7" podStartSLOduration=23.912177452999998 podStartE2EDuration="23.912177453s" podCreationTimestamp="2025-10-11 05:07:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:08:08.90222861 +0000 UTC m=+1009.798461416" watchObservedRunningTime="2025-10-11 05:08:08.912177453 +0000 UTC m=+1009.808410249" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.921085 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-68976f6bc6-9jl66" event={"ID":"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33","Type":"ContainerStarted","Data":"e808ba8643e65bfa96c6d2b0dba9c582de46d8fb985cecf451cd1bb241335b5b"} Oct 11 05:08:08 crc 
kubenswrapper[4651]: I1011 05:08:08.938036 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=27.938015951 podStartE2EDuration="27.938015951s" podCreationTimestamp="2025-10-11 05:07:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:08:08.928303464 +0000 UTC m=+1009.824536270" watchObservedRunningTime="2025-10-11 05:08:08.938015951 +0000 UTC m=+1009.834248747" Oct 11 05:08:08 crc kubenswrapper[4651]: I1011 05:08:08.975989 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe","Type":"ContainerStarted","Data":"41f4eb004f48b1d6cabf3fe68dbf9b05944491f79be5cfb2f75ae208b8e80204"} Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.022163 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-9rf4f" podStartSLOduration=9.022147052 podStartE2EDuration="9.022147052s" podCreationTimestamp="2025-10-11 05:08:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:08:08.956112651 +0000 UTC m=+1009.852345457" watchObservedRunningTime="2025-10-11 05:08:09.022147052 +0000 UTC m=+1009.918379848" Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.050036 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-9pngt"] Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.059686 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-68976f6bc6-9jl66" podStartSLOduration=24.059664506 podStartE2EDuration="24.059664506s" podCreationTimestamp="2025-10-11 05:07:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:08:09.030153825 +0000 UTC m=+1009.926386641" watchObservedRunningTime="2025-10-11 05:08:09.059664506 +0000 UTC m=+1009.955897302" Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.200347 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-79fc6b7784-n2xpf"] Oct 11 05:08:09 crc kubenswrapper[4651]: W1011 05:08:09.233322 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod84763f01_3ff4_49ae_a364_e54b62308ff0.slice/crio-8b8c2de02d00e20dbafdc012e91c04ae2fc9629f92c9dd88462c168c33af4a90 WatchSource:0}: Error finding container 8b8c2de02d00e20dbafdc012e91c04ae2fc9629f92c9dd88462c168c33af4a90: Status 404 returned error can't find the container with id 8b8c2de02d00e20dbafdc012e91c04ae2fc9629f92c9dd88462c168c33af4a90 Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.595795 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.741886 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-logs\") pod \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\" (UID: \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\") " Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.742536 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-combined-ca-bundle\") pod \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\" (UID: \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\") " Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.743137 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\" (UID: \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\") " Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.743199 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-scripts\") pod \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\" (UID: \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\") " Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.743255 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-public-tls-certs\") pod \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\" (UID: \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\") " Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.743343 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-config-data\") pod \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\" (UID: \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\") " Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.743394 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-httpd-run\") pod \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\" (UID: \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\") " Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.743426 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rxmqr\" (UniqueName: \"kubernetes.io/projected/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-kube-api-access-rxmqr\") pod \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\" (UID: \"29d2e57f-1259-4f23-9e62-ce5d86ace5a9\") " Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.742479 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-logs" (OuterVolumeSpecName: "logs") pod "29d2e57f-1259-4f23-9e62-ce5d86ace5a9" (UID: "29d2e57f-1259-4f23-9e62-ce5d86ace5a9"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.749171 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "29d2e57f-1259-4f23-9e62-ce5d86ace5a9" (UID: "29d2e57f-1259-4f23-9e62-ce5d86ace5a9"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.753949 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-kube-api-access-rxmqr" (OuterVolumeSpecName: "kube-api-access-rxmqr") pod "29d2e57f-1259-4f23-9e62-ce5d86ace5a9" (UID: "29d2e57f-1259-4f23-9e62-ce5d86ace5a9"). InnerVolumeSpecName "kube-api-access-rxmqr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.754080 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "29d2e57f-1259-4f23-9e62-ce5d86ace5a9" (UID: "29d2e57f-1259-4f23-9e62-ce5d86ace5a9"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.755906 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-scripts" (OuterVolumeSpecName: "scripts") pod "29d2e57f-1259-4f23-9e62-ce5d86ace5a9" (UID: "29d2e57f-1259-4f23-9e62-ce5d86ace5a9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.772988 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "29d2e57f-1259-4f23-9e62-ce5d86ace5a9" (UID: "29d2e57f-1259-4f23-9e62-ce5d86ace5a9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.814583 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "29d2e57f-1259-4f23-9e62-ce5d86ace5a9" (UID: "29d2e57f-1259-4f23-9e62-ce5d86ace5a9"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.818325 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-config-data" (OuterVolumeSpecName: "config-data") pod "29d2e57f-1259-4f23-9e62-ce5d86ace5a9" (UID: "29d2e57f-1259-4f23-9e62-ce5d86ace5a9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.845598 4651 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.845641 4651 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.845655 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.845665 4651 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.845678 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rxmqr\" (UniqueName: \"kubernetes.io/projected/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-kube-api-access-rxmqr\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.845689 4651 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-logs\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.845699 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29d2e57f-1259-4f23-9e62-ce5d86ace5a9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.845741 4651 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.897527 4651 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.950453 4651 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.988081 4651 generic.go:334] "Generic (PLEG): container finished" podID="65b0df64-2ebb-4c09-813e-1be5beb4e8ed" containerID="0de6bffe549422a38a213f3c9875e678a358eb25b7786bed61f425e999bfe54d" exitCode=0 Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.988893 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc5c4795-9pngt" event={"ID":"65b0df64-2ebb-4c09-813e-1be5beb4e8ed","Type":"ContainerDied","Data":"0de6bffe549422a38a213f3c9875e678a358eb25b7786bed61f425e999bfe54d"} Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.989161 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc5c4795-9pngt" event={"ID":"65b0df64-2ebb-4c09-813e-1be5beb4e8ed","Type":"ContainerStarted","Data":"6d7e49238432ea32813c6a28eb51edf1d4b7772c9c76eb20e3e1f2e95f6aaa45"} Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 
Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.995971 4651 generic.go:334] "Generic (PLEG): container finished" podID="29d2e57f-1259-4f23-9e62-ce5d86ace5a9" containerID="85ac2eecb7b62aa74139642ce4a97e46169099193040e7d16a8b4dfda52ef21e" exitCode=143
Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.996008 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"29d2e57f-1259-4f23-9e62-ce5d86ace5a9","Type":"ContainerDied","Data":"c5b3a53861d8beac080545dd8f9b1a17ddefa4c84e423457e10c1e2c25563913"}
Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.996034 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"29d2e57f-1259-4f23-9e62-ce5d86ace5a9","Type":"ContainerDied","Data":"85ac2eecb7b62aa74139642ce4a97e46169099193040e7d16a8b4dfda52ef21e"}
Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.996042 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"29d2e57f-1259-4f23-9e62-ce5d86ace5a9","Type":"ContainerDied","Data":"61b32a0343d9676520f279051da4ed45bc56e2bc73b7ced4e447de8256032635"}
Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.996058 4651 scope.go:117] "RemoveContainer" containerID="c5b3a53861d8beac080545dd8f9b1a17ddefa4c84e423457e10c1e2c25563913"
Oct 11 05:08:09 crc kubenswrapper[4651]: I1011 05:08:09.996158 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.004479 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-79fc6b7784-n2xpf" event={"ID":"84763f01-3ff4-49ae-a364-e54b62308ff0","Type":"ContainerStarted","Data":"ece7b2fef8409b80f241001509dfa5bc31212c2afe506cd86038c7f07a958996"}
Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.005720 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-79fc6b7784-n2xpf" event={"ID":"84763f01-3ff4-49ae-a364-e54b62308ff0","Type":"ContainerStarted","Data":"8b8c2de02d00e20dbafdc012e91c04ae2fc9629f92c9dd88462c168c33af4a90"}
Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.051182 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.053464 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.057082 4651 scope.go:117] "RemoveContainer" containerID="85ac2eecb7b62aa74139642ce4a97e46169099193040e7d16a8b4dfda52ef21e"
Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.104425 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 11 05:08:10 crc kubenswrapper[4651]: E1011 05:08:10.104775 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29d2e57f-1259-4f23-9e62-ce5d86ace5a9" containerName="glance-httpd"
Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.104788 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="29d2e57f-1259-4f23-9e62-ce5d86ace5a9" containerName="glance-httpd"
Oct 11 05:08:10 crc kubenswrapper[4651]: E1011 05:08:10.104808 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29d2e57f-1259-4f23-9e62-ce5d86ace5a9" containerName="glance-log"
Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.104834 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="29d2e57f-1259-4f23-9e62-ce5d86ace5a9" containerName="glance-log"
Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.104999 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="29d2e57f-1259-4f23-9e62-ce5d86ace5a9" containerName="glance-httpd"
Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.105021 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="29d2e57f-1259-4f23-9e62-ce5d86ace5a9" containerName="glance-log"
Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.120409 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.120652 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.128346 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc"
Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.128660 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.255407 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d67b157-9258-41f2-b942-bc5a54f1ea21-config-data\") pod \"glance-default-external-api-0\" (UID: \"6d67b157-9258-41f2-b942-bc5a54f1ea21\") " pod="openstack/glance-default-external-api-0"
Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.255496 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d67b157-9258-41f2-b942-bc5a54f1ea21-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"6d67b157-9258-41f2-b942-bc5a54f1ea21\") " pod="openstack/glance-default-external-api-0"
Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.255550 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"6d67b157-9258-41f2-b942-bc5a54f1ea21\") " pod="openstack/glance-default-external-api-0"
Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.255648 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6d67b157-9258-41f2-b942-bc5a54f1ea21-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"6d67b157-9258-41f2-b942-bc5a54f1ea21\") " pod="openstack/glance-default-external-api-0"
Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.255714 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6d67b157-9258-41f2-b942-bc5a54f1ea21-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"6d67b157-9258-41f2-b942-bc5a54f1ea21\") " pod="openstack/glance-default-external-api-0"
Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.255757 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fp9p5\" (UniqueName: \"kubernetes.io/projected/6d67b157-9258-41f2-b942-bc5a54f1ea21-kube-api-access-fp9p5\") pod \"glance-default-external-api-0\" (UID: \"6d67b157-9258-41f2-b942-bc5a54f1ea21\") " pod="openstack/glance-default-external-api-0"
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fp9p5\" (UniqueName: \"kubernetes.io/projected/6d67b157-9258-41f2-b942-bc5a54f1ea21-kube-api-access-fp9p5\") pod \"glance-default-external-api-0\" (UID: \"6d67b157-9258-41f2-b942-bc5a54f1ea21\") " pod="openstack/glance-default-external-api-0" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.255898 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d67b157-9258-41f2-b942-bc5a54f1ea21-scripts\") pod \"glance-default-external-api-0\" (UID: \"6d67b157-9258-41f2-b942-bc5a54f1ea21\") " pod="openstack/glance-default-external-api-0" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.255922 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6d67b157-9258-41f2-b942-bc5a54f1ea21-logs\") pod \"glance-default-external-api-0\" (UID: \"6d67b157-9258-41f2-b942-bc5a54f1ea21\") " pod="openstack/glance-default-external-api-0" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.357391 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fp9p5\" (UniqueName: \"kubernetes.io/projected/6d67b157-9258-41f2-b942-bc5a54f1ea21-kube-api-access-fp9p5\") pod \"glance-default-external-api-0\" (UID: \"6d67b157-9258-41f2-b942-bc5a54f1ea21\") " pod="openstack/glance-default-external-api-0" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.357754 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d67b157-9258-41f2-b942-bc5a54f1ea21-scripts\") pod \"glance-default-external-api-0\" (UID: \"6d67b157-9258-41f2-b942-bc5a54f1ea21\") " pod="openstack/glance-default-external-api-0" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.357779 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6d67b157-9258-41f2-b942-bc5a54f1ea21-logs\") pod \"glance-default-external-api-0\" (UID: \"6d67b157-9258-41f2-b942-bc5a54f1ea21\") " pod="openstack/glance-default-external-api-0" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.357847 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d67b157-9258-41f2-b942-bc5a54f1ea21-config-data\") pod \"glance-default-external-api-0\" (UID: \"6d67b157-9258-41f2-b942-bc5a54f1ea21\") " pod="openstack/glance-default-external-api-0" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.357904 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d67b157-9258-41f2-b942-bc5a54f1ea21-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"6d67b157-9258-41f2-b942-bc5a54f1ea21\") " pod="openstack/glance-default-external-api-0" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.357976 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"6d67b157-9258-41f2-b942-bc5a54f1ea21\") " pod="openstack/glance-default-external-api-0" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.358036 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6d67b157-9258-41f2-b942-bc5a54f1ea21-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"6d67b157-9258-41f2-b942-bc5a54f1ea21\") " pod="openstack/glance-default-external-api-0" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.358088 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6d67b157-9258-41f2-b942-bc5a54f1ea21-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"6d67b157-9258-41f2-b942-bc5a54f1ea21\") " pod="openstack/glance-default-external-api-0" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.359003 4651 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"6d67b157-9258-41f2-b942-bc5a54f1ea21\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.359157 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6d67b157-9258-41f2-b942-bc5a54f1ea21-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"6d67b157-9258-41f2-b942-bc5a54f1ea21\") " pod="openstack/glance-default-external-api-0" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.359013 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6d67b157-9258-41f2-b942-bc5a54f1ea21-logs\") pod \"glance-default-external-api-0\" (UID: \"6d67b157-9258-41f2-b942-bc5a54f1ea21\") " pod="openstack/glance-default-external-api-0" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.363574 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d67b157-9258-41f2-b942-bc5a54f1ea21-scripts\") pod \"glance-default-external-api-0\" (UID: \"6d67b157-9258-41f2-b942-bc5a54f1ea21\") " pod="openstack/glance-default-external-api-0" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.364868 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d67b157-9258-41f2-b942-bc5a54f1ea21-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"6d67b157-9258-41f2-b942-bc5a54f1ea21\") " pod="openstack/glance-default-external-api-0" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.365982 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6d67b157-9258-41f2-b942-bc5a54f1ea21-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"6d67b157-9258-41f2-b942-bc5a54f1ea21\") " pod="openstack/glance-default-external-api-0" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.366459 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d67b157-9258-41f2-b942-bc5a54f1ea21-config-data\") pod \"glance-default-external-api-0\" (UID: \"6d67b157-9258-41f2-b942-bc5a54f1ea21\") " pod="openstack/glance-default-external-api-0" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.373457 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fp9p5\" (UniqueName: \"kubernetes.io/projected/6d67b157-9258-41f2-b942-bc5a54f1ea21-kube-api-access-fp9p5\") pod 
\"glance-default-external-api-0\" (UID: \"6d67b157-9258-41f2-b942-bc5a54f1ea21\") " pod="openstack/glance-default-external-api-0" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.392039 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"6d67b157-9258-41f2-b942-bc5a54f1ea21\") " pod="openstack/glance-default-external-api-0" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.520452 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.622790 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-6856c774b5-fq9r6"] Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.625446 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6856c774b5-fq9r6" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.629426 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.630004 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.642845 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6856c774b5-fq9r6"] Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.666974 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pnzcr\" (UniqueName: \"kubernetes.io/projected/301b63d9-53a7-49b2-9d71-2b2bf854de89-kube-api-access-pnzcr\") pod \"neutron-6856c774b5-fq9r6\" (UID: \"301b63d9-53a7-49b2-9d71-2b2bf854de89\") " pod="openstack/neutron-6856c774b5-fq9r6" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.667558 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/301b63d9-53a7-49b2-9d71-2b2bf854de89-config\") pod \"neutron-6856c774b5-fq9r6\" (UID: \"301b63d9-53a7-49b2-9d71-2b2bf854de89\") " pod="openstack/neutron-6856c774b5-fq9r6" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.667755 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/301b63d9-53a7-49b2-9d71-2b2bf854de89-internal-tls-certs\") pod \"neutron-6856c774b5-fq9r6\" (UID: \"301b63d9-53a7-49b2-9d71-2b2bf854de89\") " pod="openstack/neutron-6856c774b5-fq9r6" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.667928 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/301b63d9-53a7-49b2-9d71-2b2bf854de89-public-tls-certs\") pod \"neutron-6856c774b5-fq9r6\" (UID: \"301b63d9-53a7-49b2-9d71-2b2bf854de89\") " pod="openstack/neutron-6856c774b5-fq9r6" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.668159 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/301b63d9-53a7-49b2-9d71-2b2bf854de89-httpd-config\") pod \"neutron-6856c774b5-fq9r6\" (UID: \"301b63d9-53a7-49b2-9d71-2b2bf854de89\") " pod="openstack/neutron-6856c774b5-fq9r6" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 
05:08:10.668461 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/301b63d9-53a7-49b2-9d71-2b2bf854de89-ovndb-tls-certs\") pod \"neutron-6856c774b5-fq9r6\" (UID: \"301b63d9-53a7-49b2-9d71-2b2bf854de89\") " pod="openstack/neutron-6856c774b5-fq9r6" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.668520 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/301b63d9-53a7-49b2-9d71-2b2bf854de89-combined-ca-bundle\") pod \"neutron-6856c774b5-fq9r6\" (UID: \"301b63d9-53a7-49b2-9d71-2b2bf854de89\") " pod="openstack/neutron-6856c774b5-fq9r6" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.734133 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-75955bb549-s67lw" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.771762 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnzcr\" (UniqueName: \"kubernetes.io/projected/301b63d9-53a7-49b2-9d71-2b2bf854de89-kube-api-access-pnzcr\") pod \"neutron-6856c774b5-fq9r6\" (UID: \"301b63d9-53a7-49b2-9d71-2b2bf854de89\") " pod="openstack/neutron-6856c774b5-fq9r6" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.771811 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/301b63d9-53a7-49b2-9d71-2b2bf854de89-config\") pod \"neutron-6856c774b5-fq9r6\" (UID: \"301b63d9-53a7-49b2-9d71-2b2bf854de89\") " pod="openstack/neutron-6856c774b5-fq9r6" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.771872 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/301b63d9-53a7-49b2-9d71-2b2bf854de89-internal-tls-certs\") pod \"neutron-6856c774b5-fq9r6\" (UID: \"301b63d9-53a7-49b2-9d71-2b2bf854de89\") " pod="openstack/neutron-6856c774b5-fq9r6" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.771898 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/301b63d9-53a7-49b2-9d71-2b2bf854de89-public-tls-certs\") pod \"neutron-6856c774b5-fq9r6\" (UID: \"301b63d9-53a7-49b2-9d71-2b2bf854de89\") " pod="openstack/neutron-6856c774b5-fq9r6" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.771947 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/301b63d9-53a7-49b2-9d71-2b2bf854de89-httpd-config\") pod \"neutron-6856c774b5-fq9r6\" (UID: \"301b63d9-53a7-49b2-9d71-2b2bf854de89\") " pod="openstack/neutron-6856c774b5-fq9r6" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.772023 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/301b63d9-53a7-49b2-9d71-2b2bf854de89-ovndb-tls-certs\") pod \"neutron-6856c774b5-fq9r6\" (UID: \"301b63d9-53a7-49b2-9d71-2b2bf854de89\") " pod="openstack/neutron-6856c774b5-fq9r6" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.772045 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/301b63d9-53a7-49b2-9d71-2b2bf854de89-combined-ca-bundle\") pod \"neutron-6856c774b5-fq9r6\" (UID: \"301b63d9-53a7-49b2-9d71-2b2bf854de89\") " 
pod="openstack/neutron-6856c774b5-fq9r6" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.775607 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/301b63d9-53a7-49b2-9d71-2b2bf854de89-combined-ca-bundle\") pod \"neutron-6856c774b5-fq9r6\" (UID: \"301b63d9-53a7-49b2-9d71-2b2bf854de89\") " pod="openstack/neutron-6856c774b5-fq9r6" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.776396 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/301b63d9-53a7-49b2-9d71-2b2bf854de89-internal-tls-certs\") pod \"neutron-6856c774b5-fq9r6\" (UID: \"301b63d9-53a7-49b2-9d71-2b2bf854de89\") " pod="openstack/neutron-6856c774b5-fq9r6" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.777101 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/301b63d9-53a7-49b2-9d71-2b2bf854de89-ovndb-tls-certs\") pod \"neutron-6856c774b5-fq9r6\" (UID: \"301b63d9-53a7-49b2-9d71-2b2bf854de89\") " pod="openstack/neutron-6856c774b5-fq9r6" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.780117 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/301b63d9-53a7-49b2-9d71-2b2bf854de89-httpd-config\") pod \"neutron-6856c774b5-fq9r6\" (UID: \"301b63d9-53a7-49b2-9d71-2b2bf854de89\") " pod="openstack/neutron-6856c774b5-fq9r6" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.780544 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/301b63d9-53a7-49b2-9d71-2b2bf854de89-public-tls-certs\") pod \"neutron-6856c774b5-fq9r6\" (UID: \"301b63d9-53a7-49b2-9d71-2b2bf854de89\") " pod="openstack/neutron-6856c774b5-fq9r6" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.780753 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/301b63d9-53a7-49b2-9d71-2b2bf854de89-config\") pod \"neutron-6856c774b5-fq9r6\" (UID: \"301b63d9-53a7-49b2-9d71-2b2bf854de89\") " pod="openstack/neutron-6856c774b5-fq9r6" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.789154 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pnzcr\" (UniqueName: \"kubernetes.io/projected/301b63d9-53a7-49b2-9d71-2b2bf854de89-kube-api-access-pnzcr\") pod \"neutron-6856c774b5-fq9r6\" (UID: \"301b63d9-53a7-49b2-9d71-2b2bf854de89\") " pod="openstack/neutron-6856c774b5-fq9r6" Oct 11 05:08:10 crc kubenswrapper[4651]: I1011 05:08:10.968721 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6856c774b5-fq9r6" Oct 11 05:08:11 crc kubenswrapper[4651]: I1011 05:08:11.882661 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29d2e57f-1259-4f23-9e62-ce5d86ace5a9" path="/var/lib/kubelet/pods/29d2e57f-1259-4f23-9e62-ce5d86ace5a9/volumes" Oct 11 05:08:13 crc kubenswrapper[4651]: I1011 05:08:13.118625 4651 scope.go:117] "RemoveContainer" containerID="c5b3a53861d8beac080545dd8f9b1a17ddefa4c84e423457e10c1e2c25563913" Oct 11 05:08:13 crc kubenswrapper[4651]: E1011 05:08:13.119459 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5b3a53861d8beac080545dd8f9b1a17ddefa4c84e423457e10c1e2c25563913\": container with ID starting with c5b3a53861d8beac080545dd8f9b1a17ddefa4c84e423457e10c1e2c25563913 not found: ID does not exist" containerID="c5b3a53861d8beac080545dd8f9b1a17ddefa4c84e423457e10c1e2c25563913" Oct 11 05:08:13 crc kubenswrapper[4651]: I1011 05:08:13.119514 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5b3a53861d8beac080545dd8f9b1a17ddefa4c84e423457e10c1e2c25563913"} err="failed to get container status \"c5b3a53861d8beac080545dd8f9b1a17ddefa4c84e423457e10c1e2c25563913\": rpc error: code = NotFound desc = could not find container \"c5b3a53861d8beac080545dd8f9b1a17ddefa4c84e423457e10c1e2c25563913\": container with ID starting with c5b3a53861d8beac080545dd8f9b1a17ddefa4c84e423457e10c1e2c25563913 not found: ID does not exist" Oct 11 05:08:13 crc kubenswrapper[4651]: I1011 05:08:13.119543 4651 scope.go:117] "RemoveContainer" containerID="85ac2eecb7b62aa74139642ce4a97e46169099193040e7d16a8b4dfda52ef21e" Oct 11 05:08:13 crc kubenswrapper[4651]: E1011 05:08:13.128734 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85ac2eecb7b62aa74139642ce4a97e46169099193040e7d16a8b4dfda52ef21e\": container with ID starting with 85ac2eecb7b62aa74139642ce4a97e46169099193040e7d16a8b4dfda52ef21e not found: ID does not exist" containerID="85ac2eecb7b62aa74139642ce4a97e46169099193040e7d16a8b4dfda52ef21e" Oct 11 05:08:13 crc kubenswrapper[4651]: I1011 05:08:13.128788 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85ac2eecb7b62aa74139642ce4a97e46169099193040e7d16a8b4dfda52ef21e"} err="failed to get container status \"85ac2eecb7b62aa74139642ce4a97e46169099193040e7d16a8b4dfda52ef21e\": rpc error: code = NotFound desc = could not find container \"85ac2eecb7b62aa74139642ce4a97e46169099193040e7d16a8b4dfda52ef21e\": container with ID starting with 85ac2eecb7b62aa74139642ce4a97e46169099193040e7d16a8b4dfda52ef21e not found: ID does not exist" Oct 11 05:08:13 crc kubenswrapper[4651]: I1011 05:08:13.128844 4651 scope.go:117] "RemoveContainer" containerID="c5b3a53861d8beac080545dd8f9b1a17ddefa4c84e423457e10c1e2c25563913" Oct 11 05:08:13 crc kubenswrapper[4651]: I1011 05:08:13.129196 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5b3a53861d8beac080545dd8f9b1a17ddefa4c84e423457e10c1e2c25563913"} err="failed to get container status \"c5b3a53861d8beac080545dd8f9b1a17ddefa4c84e423457e10c1e2c25563913\": rpc error: code = NotFound desc = could not find container \"c5b3a53861d8beac080545dd8f9b1a17ddefa4c84e423457e10c1e2c25563913\": container with ID starting with c5b3a53861d8beac080545dd8f9b1a17ddefa4c84e423457e10c1e2c25563913 not found: ID does not 
exist" Oct 11 05:08:13 crc kubenswrapper[4651]: I1011 05:08:13.129221 4651 scope.go:117] "RemoveContainer" containerID="85ac2eecb7b62aa74139642ce4a97e46169099193040e7d16a8b4dfda52ef21e" Oct 11 05:08:13 crc kubenswrapper[4651]: I1011 05:08:13.129530 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85ac2eecb7b62aa74139642ce4a97e46169099193040e7d16a8b4dfda52ef21e"} err="failed to get container status \"85ac2eecb7b62aa74139642ce4a97e46169099193040e7d16a8b4dfda52ef21e\": rpc error: code = NotFound desc = could not find container \"85ac2eecb7b62aa74139642ce4a97e46169099193040e7d16a8b4dfda52ef21e\": container with ID starting with 85ac2eecb7b62aa74139642ce4a97e46169099193040e7d16a8b4dfda52ef21e not found: ID does not exist" Oct 11 05:08:13 crc kubenswrapper[4651]: I1011 05:08:13.793660 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6856c774b5-fq9r6"] Oct 11 05:08:13 crc kubenswrapper[4651]: W1011 05:08:13.799654 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod301b63d9_53a7_49b2_9d71_2b2bf854de89.slice/crio-f294b62a01dd597d13e059b2959367848704aecb88b0567d1de9a27cd1cfe26c WatchSource:0}: Error finding container f294b62a01dd597d13e059b2959367848704aecb88b0567d1de9a27cd1cfe26c: Status 404 returned error can't find the container with id f294b62a01dd597d13e059b2959367848704aecb88b0567d1de9a27cd1cfe26c Oct 11 05:08:13 crc kubenswrapper[4651]: I1011 05:08:13.893391 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 11 05:08:14 crc kubenswrapper[4651]: I1011 05:08:14.080165 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6d67b157-9258-41f2-b942-bc5a54f1ea21","Type":"ContainerStarted","Data":"c24aef318ac682de54e21143be63340dc533dc25aa895d7f3e56c5ab5b8c0108"} Oct 11 05:08:14 crc kubenswrapper[4651]: I1011 05:08:14.082877 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe","Type":"ContainerStarted","Data":"b060d41a176950323808f226ba4155e249c40327f219166e9b7bf88a1ffe963f"} Oct 11 05:08:14 crc kubenswrapper[4651]: I1011 05:08:14.083007 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="f2d90a7b-5995-43a7-8c9b-05639ac9e9fe" containerName="glance-log" containerID="cri-o://41f4eb004f48b1d6cabf3fe68dbf9b05944491f79be5cfb2f75ae208b8e80204" gracePeriod=30 Oct 11 05:08:14 crc kubenswrapper[4651]: I1011 05:08:14.083303 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="f2d90a7b-5995-43a7-8c9b-05639ac9e9fe" containerName="glance-httpd" containerID="cri-o://b060d41a176950323808f226ba4155e249c40327f219166e9b7bf88a1ffe963f" gracePeriod=30 Oct 11 05:08:14 crc kubenswrapper[4651]: I1011 05:08:14.088664 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6856c774b5-fq9r6" event={"ID":"301b63d9-53a7-49b2-9d71-2b2bf854de89","Type":"ContainerStarted","Data":"f294b62a01dd597d13e059b2959367848704aecb88b0567d1de9a27cd1cfe26c"} Oct 11 05:08:14 crc kubenswrapper[4651]: I1011 05:08:14.096659 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-79fc6b7784-n2xpf" 
event={"ID":"84763f01-3ff4-49ae-a364-e54b62308ff0","Type":"ContainerStarted","Data":"2588ea3b17498f8c9979d7197450c91ed307a1b5ddb07ef32f2476c9b05053af"} Oct 11 05:08:14 crc kubenswrapper[4651]: I1011 05:08:14.100796 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=32.100779493 podStartE2EDuration="32.100779493s" podCreationTimestamp="2025-10-11 05:07:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:08:14.100009673 +0000 UTC m=+1014.996242489" watchObservedRunningTime="2025-10-11 05:08:14.100779493 +0000 UTC m=+1014.997012289" Oct 11 05:08:14 crc kubenswrapper[4651]: I1011 05:08:14.118511 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc5c4795-9pngt" event={"ID":"65b0df64-2ebb-4c09-813e-1be5beb4e8ed","Type":"ContainerStarted","Data":"68a8a6f2bea783b912ab6323eae94718961e555c71ee717def8ada8bfac784c1"} Oct 11 05:08:15 crc kubenswrapper[4651]: I1011 05:08:15.134589 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6d67b157-9258-41f2-b942-bc5a54f1ea21","Type":"ContainerStarted","Data":"b35b779a90a8d682f090c62733e4802b34318f19145eca63e6fd686fa49970b1"} Oct 11 05:08:15 crc kubenswrapper[4651]: I1011 05:08:15.139206 4651 generic.go:334] "Generic (PLEG): container finished" podID="f2d90a7b-5995-43a7-8c9b-05639ac9e9fe" containerID="b060d41a176950323808f226ba4155e249c40327f219166e9b7bf88a1ffe963f" exitCode=0 Oct 11 05:08:15 crc kubenswrapper[4651]: I1011 05:08:15.139234 4651 generic.go:334] "Generic (PLEG): container finished" podID="f2d90a7b-5995-43a7-8c9b-05639ac9e9fe" containerID="41f4eb004f48b1d6cabf3fe68dbf9b05944491f79be5cfb2f75ae208b8e80204" exitCode=143 Oct 11 05:08:15 crc kubenswrapper[4651]: I1011 05:08:15.139269 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe","Type":"ContainerDied","Data":"b060d41a176950323808f226ba4155e249c40327f219166e9b7bf88a1ffe963f"} Oct 11 05:08:15 crc kubenswrapper[4651]: I1011 05:08:15.139293 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe","Type":"ContainerDied","Data":"41f4eb004f48b1d6cabf3fe68dbf9b05944491f79be5cfb2f75ae208b8e80204"} Oct 11 05:08:15 crc kubenswrapper[4651]: I1011 05:08:15.141902 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6856c774b5-fq9r6" event={"ID":"301b63d9-53a7-49b2-9d71-2b2bf854de89","Type":"ContainerStarted","Data":"cefac8b8c10f54e5221d44b5cf36181812ef41b44d92bc6a19c392ca9bf49906"} Oct 11 05:08:15 crc kubenswrapper[4651]: I1011 05:08:15.142379 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5ccc5c4795-9pngt" Oct 11 05:08:15 crc kubenswrapper[4651]: I1011 05:08:15.142507 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-79fc6b7784-n2xpf" Oct 11 05:08:15 crc kubenswrapper[4651]: I1011 05:08:15.167065 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5ccc5c4795-9pngt" podStartSLOduration=8.167047545 podStartE2EDuration="8.167047545s" podCreationTimestamp="2025-10-11 05:08:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 
+0000 UTC" observedRunningTime="2025-10-11 05:08:15.157076442 +0000 UTC m=+1016.053309248" watchObservedRunningTime="2025-10-11 05:08:15.167047545 +0000 UTC m=+1016.063280341" Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.163085 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe","Type":"ContainerDied","Data":"a0f5491efa2fa93221aa88d4f90d9064e78a2f60e0b3eb331b8106d259391f2d"} Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.163419 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a0f5491efa2fa93221aa88d4f90d9064e78a2f60e0b3eb331b8106d259391f2d" Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.167006 4651 generic.go:334] "Generic (PLEG): container finished" podID="261ffa67-4305-4260-903d-93b8af576721" containerID="5fdd10400e07e88a06a5fdb658a6364edb433042dc71c45303e4ee46efa5fd26" exitCode=0 Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.168144 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-9rf4f" event={"ID":"261ffa67-4305-4260-903d-93b8af576721","Type":"ContainerDied","Data":"5fdd10400e07e88a06a5fdb658a6364edb433042dc71c45303e4ee46efa5fd26"} Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.185038 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-79fc6b7784-n2xpf" podStartSLOduration=8.185020699 podStartE2EDuration="8.185020699s" podCreationTimestamp="2025-10-11 05:08:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:08:15.182998751 +0000 UTC m=+1016.079231557" watchObservedRunningTime="2025-10-11 05:08:16.185020699 +0000 UTC m=+1017.081253495" Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.193242 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-68976f6bc6-9jl66" Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.193495 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-68976f6bc6-9jl66" Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.207679 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.308167 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h8vhg\" (UniqueName: \"kubernetes.io/projected/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-kube-api-access-h8vhg\") pod \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\" (UID: \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\") " Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.308242 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-internal-tls-certs\") pod \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\" (UID: \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\") " Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.308387 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\" (UID: \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\") " Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.309445 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-config-data\") pod \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\" (UID: \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\") " Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.309529 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-httpd-run\") pod \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\" (UID: \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\") " Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.309561 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-logs\") pod \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\" (UID: \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\") " Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.309584 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-combined-ca-bundle\") pod \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\" (UID: \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\") " Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.309656 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-scripts\") pod \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\" (UID: \"f2d90a7b-5995-43a7-8c9b-05639ac9e9fe\") " Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.312343 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-kube-api-access-h8vhg" (OuterVolumeSpecName: "kube-api-access-h8vhg") pod "f2d90a7b-5995-43a7-8c9b-05639ac9e9fe" (UID: "f2d90a7b-5995-43a7-8c9b-05639ac9e9fe"). InnerVolumeSpecName "kube-api-access-h8vhg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.315400 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-logs" (OuterVolumeSpecName: "logs") pod "f2d90a7b-5995-43a7-8c9b-05639ac9e9fe" (UID: "f2d90a7b-5995-43a7-8c9b-05639ac9e9fe"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.315423 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "f2d90a7b-5995-43a7-8c9b-05639ac9e9fe" (UID: "f2d90a7b-5995-43a7-8c9b-05639ac9e9fe"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.317044 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-scripts" (OuterVolumeSpecName: "scripts") pod "f2d90a7b-5995-43a7-8c9b-05639ac9e9fe" (UID: "f2d90a7b-5995-43a7-8c9b-05639ac9e9fe"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.317830 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "glance") pod "f2d90a7b-5995-43a7-8c9b-05639ac9e9fe" (UID: "f2d90a7b-5995-43a7-8c9b-05639ac9e9fe"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.334424 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f2d90a7b-5995-43a7-8c9b-05639ac9e9fe" (UID: "f2d90a7b-5995-43a7-8c9b-05639ac9e9fe"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.336019 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-5f7d84485b-zb5s7" Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.336047 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5f7d84485b-zb5s7" Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.358957 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-config-data" (OuterVolumeSpecName: "config-data") pod "f2d90a7b-5995-43a7-8c9b-05639ac9e9fe" (UID: "f2d90a7b-5995-43a7-8c9b-05639ac9e9fe"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.379462 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "f2d90a7b-5995-43a7-8c9b-05639ac9e9fe" (UID: "f2d90a7b-5995-43a7-8c9b-05639ac9e9fe"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.421052 4651 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.421082 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h8vhg\" (UniqueName: \"kubernetes.io/projected/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-kube-api-access-h8vhg\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.421091 4651 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.421109 4651 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.421121 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.421129 4651 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.421139 4651 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-logs\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.421147 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.453838 4651 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Oct 11 05:08:16 crc kubenswrapper[4651]: I1011 05:08:16.523013 4651 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.175933 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.244543 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.258628 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.305928 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 11 05:08:17 crc kubenswrapper[4651]: E1011 05:08:17.306481 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2d90a7b-5995-43a7-8c9b-05639ac9e9fe" containerName="glance-httpd" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.306508 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2d90a7b-5995-43a7-8c9b-05639ac9e9fe" containerName="glance-httpd" Oct 11 05:08:17 crc kubenswrapper[4651]: E1011 05:08:17.306556 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2d90a7b-5995-43a7-8c9b-05639ac9e9fe" containerName="glance-log" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.306564 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2d90a7b-5995-43a7-8c9b-05639ac9e9fe" containerName="glance-log" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.306980 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2d90a7b-5995-43a7-8c9b-05639ac9e9fe" containerName="glance-httpd" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.307014 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2d90a7b-5995-43a7-8c9b-05639ac9e9fe" containerName="glance-log" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.308208 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.313214 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.313369 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.317201 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.442805 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-internal-api-0\" (UID: \"5c04eb41-eb14-4687-a0fa-56f07612da15\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.443523 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c04eb41-eb14-4687-a0fa-56f07612da15-logs\") pod \"glance-default-internal-api-0\" (UID: \"5c04eb41-eb14-4687-a0fa-56f07612da15\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.443566 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c04eb41-eb14-4687-a0fa-56f07612da15-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5c04eb41-eb14-4687-a0fa-56f07612da15\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.443606 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c04eb41-eb14-4687-a0fa-56f07612da15-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5c04eb41-eb14-4687-a0fa-56f07612da15\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.443678 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5c04eb41-eb14-4687-a0fa-56f07612da15-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5c04eb41-eb14-4687-a0fa-56f07612da15\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.443749 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4t6z\" (UniqueName: \"kubernetes.io/projected/5c04eb41-eb14-4687-a0fa-56f07612da15-kube-api-access-b4t6z\") pod \"glance-default-internal-api-0\" (UID: \"5c04eb41-eb14-4687-a0fa-56f07612da15\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.443787 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c04eb41-eb14-4687-a0fa-56f07612da15-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5c04eb41-eb14-4687-a0fa-56f07612da15\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.443907 4651 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c04eb41-eb14-4687-a0fa-56f07612da15-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5c04eb41-eb14-4687-a0fa-56f07612da15\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.547319 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b4t6z\" (UniqueName: \"kubernetes.io/projected/5c04eb41-eb14-4687-a0fa-56f07612da15-kube-api-access-b4t6z\") pod \"glance-default-internal-api-0\" (UID: \"5c04eb41-eb14-4687-a0fa-56f07612da15\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.547390 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c04eb41-eb14-4687-a0fa-56f07612da15-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5c04eb41-eb14-4687-a0fa-56f07612da15\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.547471 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c04eb41-eb14-4687-a0fa-56f07612da15-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5c04eb41-eb14-4687-a0fa-56f07612da15\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.547507 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-internal-api-0\" (UID: \"5c04eb41-eb14-4687-a0fa-56f07612da15\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.547596 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c04eb41-eb14-4687-a0fa-56f07612da15-logs\") pod \"glance-default-internal-api-0\" (UID: \"5c04eb41-eb14-4687-a0fa-56f07612da15\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.547639 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c04eb41-eb14-4687-a0fa-56f07612da15-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5c04eb41-eb14-4687-a0fa-56f07612da15\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.547677 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c04eb41-eb14-4687-a0fa-56f07612da15-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5c04eb41-eb14-4687-a0fa-56f07612da15\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.547765 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5c04eb41-eb14-4687-a0fa-56f07612da15-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5c04eb41-eb14-4687-a0fa-56f07612da15\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.548151 4651 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" 
(UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-internal-api-0\" (UID: \"5c04eb41-eb14-4687-a0fa-56f07612da15\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/glance-default-internal-api-0" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.560680 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c04eb41-eb14-4687-a0fa-56f07612da15-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5c04eb41-eb14-4687-a0fa-56f07612da15\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.566971 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5c04eb41-eb14-4687-a0fa-56f07612da15-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5c04eb41-eb14-4687-a0fa-56f07612da15\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.567937 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4t6z\" (UniqueName: \"kubernetes.io/projected/5c04eb41-eb14-4687-a0fa-56f07612da15-kube-api-access-b4t6z\") pod \"glance-default-internal-api-0\" (UID: \"5c04eb41-eb14-4687-a0fa-56f07612da15\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.568248 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c04eb41-eb14-4687-a0fa-56f07612da15-logs\") pod \"glance-default-internal-api-0\" (UID: \"5c04eb41-eb14-4687-a0fa-56f07612da15\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.571043 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c04eb41-eb14-4687-a0fa-56f07612da15-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5c04eb41-eb14-4687-a0fa-56f07612da15\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.581347 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c04eb41-eb14-4687-a0fa-56f07612da15-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5c04eb41-eb14-4687-a0fa-56f07612da15\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.587858 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c04eb41-eb14-4687-a0fa-56f07612da15-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5c04eb41-eb14-4687-a0fa-56f07612da15\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.608009 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-internal-api-0\" (UID: \"5c04eb41-eb14-4687-a0fa-56f07612da15\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.636749 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 11 05:08:17 crc kubenswrapper[4651]: I1011 05:08:17.889349 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2d90a7b-5995-43a7-8c9b-05639ac9e9fe" path="/var/lib/kubelet/pods/f2d90a7b-5995-43a7-8c9b-05639ac9e9fe/volumes" Oct 11 05:08:18 crc kubenswrapper[4651]: I1011 05:08:18.371674 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5ccc5c4795-9pngt" Oct 11 05:08:18 crc kubenswrapper[4651]: I1011 05:08:18.499186 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-t96wz"] Oct 11 05:08:18 crc kubenswrapper[4651]: I1011 05:08:18.499393 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57c957c4ff-t96wz" podUID="90edbdf3-b435-459f-9a74-3f9ea9ace40f" containerName="dnsmasq-dns" containerID="cri-o://07cb7d8f88667b10b54ad08748c84e647f86e6063360be6a1be28033d4f359f2" gracePeriod=10 Oct 11 05:08:18 crc kubenswrapper[4651]: I1011 05:08:18.525483 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-9rf4f" Oct 11 05:08:18 crc kubenswrapper[4651]: I1011 05:08:18.690511 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/261ffa67-4305-4260-903d-93b8af576721-config-data\") pod \"261ffa67-4305-4260-903d-93b8af576721\" (UID: \"261ffa67-4305-4260-903d-93b8af576721\") " Oct 11 05:08:18 crc kubenswrapper[4651]: I1011 05:08:18.690836 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r6s8f\" (UniqueName: \"kubernetes.io/projected/261ffa67-4305-4260-903d-93b8af576721-kube-api-access-r6s8f\") pod \"261ffa67-4305-4260-903d-93b8af576721\" (UID: \"261ffa67-4305-4260-903d-93b8af576721\") " Oct 11 05:08:18 crc kubenswrapper[4651]: I1011 05:08:18.690890 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/261ffa67-4305-4260-903d-93b8af576721-credential-keys\") pod \"261ffa67-4305-4260-903d-93b8af576721\" (UID: \"261ffa67-4305-4260-903d-93b8af576721\") " Oct 11 05:08:18 crc kubenswrapper[4651]: I1011 05:08:18.690928 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/261ffa67-4305-4260-903d-93b8af576721-scripts\") pod \"261ffa67-4305-4260-903d-93b8af576721\" (UID: \"261ffa67-4305-4260-903d-93b8af576721\") " Oct 11 05:08:18 crc kubenswrapper[4651]: I1011 05:08:18.691019 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/261ffa67-4305-4260-903d-93b8af576721-combined-ca-bundle\") pod \"261ffa67-4305-4260-903d-93b8af576721\" (UID: \"261ffa67-4305-4260-903d-93b8af576721\") " Oct 11 05:08:18 crc kubenswrapper[4651]: I1011 05:08:18.691041 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/261ffa67-4305-4260-903d-93b8af576721-fernet-keys\") pod \"261ffa67-4305-4260-903d-93b8af576721\" (UID: \"261ffa67-4305-4260-903d-93b8af576721\") " Oct 11 05:08:18 crc kubenswrapper[4651]: I1011 05:08:18.708992 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/261ffa67-4305-4260-903d-93b8af576721-kube-api-access-r6s8f" 
(OuterVolumeSpecName: "kube-api-access-r6s8f") pod "261ffa67-4305-4260-903d-93b8af576721" (UID: "261ffa67-4305-4260-903d-93b8af576721"). InnerVolumeSpecName "kube-api-access-r6s8f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:08:18 crc kubenswrapper[4651]: I1011 05:08:18.710928 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/261ffa67-4305-4260-903d-93b8af576721-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "261ffa67-4305-4260-903d-93b8af576721" (UID: "261ffa67-4305-4260-903d-93b8af576721"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:18 crc kubenswrapper[4651]: I1011 05:08:18.715874 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/261ffa67-4305-4260-903d-93b8af576721-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "261ffa67-4305-4260-903d-93b8af576721" (UID: "261ffa67-4305-4260-903d-93b8af576721"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:18 crc kubenswrapper[4651]: I1011 05:08:18.726332 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/261ffa67-4305-4260-903d-93b8af576721-scripts" (OuterVolumeSpecName: "scripts") pod "261ffa67-4305-4260-903d-93b8af576721" (UID: "261ffa67-4305-4260-903d-93b8af576721"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:18 crc kubenswrapper[4651]: I1011 05:08:18.762708 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/261ffa67-4305-4260-903d-93b8af576721-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "261ffa67-4305-4260-903d-93b8af576721" (UID: "261ffa67-4305-4260-903d-93b8af576721"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:18 crc kubenswrapper[4651]: I1011 05:08:18.762808 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/261ffa67-4305-4260-903d-93b8af576721-config-data" (OuterVolumeSpecName: "config-data") pod "261ffa67-4305-4260-903d-93b8af576721" (UID: "261ffa67-4305-4260-903d-93b8af576721"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:18 crc kubenswrapper[4651]: I1011 05:08:18.799042 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/261ffa67-4305-4260-903d-93b8af576721-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:18 crc kubenswrapper[4651]: I1011 05:08:18.799069 4651 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/261ffa67-4305-4260-903d-93b8af576721-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:18 crc kubenswrapper[4651]: I1011 05:08:18.799078 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/261ffa67-4305-4260-903d-93b8af576721-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:18 crc kubenswrapper[4651]: I1011 05:08:18.799086 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r6s8f\" (UniqueName: \"kubernetes.io/projected/261ffa67-4305-4260-903d-93b8af576721-kube-api-access-r6s8f\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:18 crc kubenswrapper[4651]: I1011 05:08:18.799096 4651 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/261ffa67-4305-4260-903d-93b8af576721-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:18 crc kubenswrapper[4651]: I1011 05:08:18.799104 4651 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/261ffa67-4305-4260-903d-93b8af576721-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.040454 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-t96wz" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.104397 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90edbdf3-b435-459f-9a74-3f9ea9ace40f-config\") pod \"90edbdf3-b435-459f-9a74-3f9ea9ace40f\" (UID: \"90edbdf3-b435-459f-9a74-3f9ea9ace40f\") " Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.104596 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rp2lz\" (UniqueName: \"kubernetes.io/projected/90edbdf3-b435-459f-9a74-3f9ea9ace40f-kube-api-access-rp2lz\") pod \"90edbdf3-b435-459f-9a74-3f9ea9ace40f\" (UID: \"90edbdf3-b435-459f-9a74-3f9ea9ace40f\") " Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.104641 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/90edbdf3-b435-459f-9a74-3f9ea9ace40f-ovsdbserver-nb\") pod \"90edbdf3-b435-459f-9a74-3f9ea9ace40f\" (UID: \"90edbdf3-b435-459f-9a74-3f9ea9ace40f\") " Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.104666 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/90edbdf3-b435-459f-9a74-3f9ea9ace40f-dns-svc\") pod \"90edbdf3-b435-459f-9a74-3f9ea9ace40f\" (UID: \"90edbdf3-b435-459f-9a74-3f9ea9ace40f\") " Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.104710 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/90edbdf3-b435-459f-9a74-3f9ea9ace40f-ovsdbserver-sb\") pod \"90edbdf3-b435-459f-9a74-3f9ea9ace40f\" (UID: 
\"90edbdf3-b435-459f-9a74-3f9ea9ace40f\") " Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.104738 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/90edbdf3-b435-459f-9a74-3f9ea9ace40f-dns-swift-storage-0\") pod \"90edbdf3-b435-459f-9a74-3f9ea9ace40f\" (UID: \"90edbdf3-b435-459f-9a74-3f9ea9ace40f\") " Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.110451 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90edbdf3-b435-459f-9a74-3f9ea9ace40f-kube-api-access-rp2lz" (OuterVolumeSpecName: "kube-api-access-rp2lz") pod "90edbdf3-b435-459f-9a74-3f9ea9ace40f" (UID: "90edbdf3-b435-459f-9a74-3f9ea9ace40f"). InnerVolumeSpecName "kube-api-access-rp2lz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.148749 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90edbdf3-b435-459f-9a74-3f9ea9ace40f-config" (OuterVolumeSpecName: "config") pod "90edbdf3-b435-459f-9a74-3f9ea9ace40f" (UID: "90edbdf3-b435-459f-9a74-3f9ea9ace40f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.160358 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90edbdf3-b435-459f-9a74-3f9ea9ace40f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "90edbdf3-b435-459f-9a74-3f9ea9ace40f" (UID: "90edbdf3-b435-459f-9a74-3f9ea9ace40f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.178865 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90edbdf3-b435-459f-9a74-3f9ea9ace40f-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "90edbdf3-b435-459f-9a74-3f9ea9ace40f" (UID: "90edbdf3-b435-459f-9a74-3f9ea9ace40f"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.200866 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90edbdf3-b435-459f-9a74-3f9ea9ace40f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "90edbdf3-b435-459f-9a74-3f9ea9ace40f" (UID: "90edbdf3-b435-459f-9a74-3f9ea9ace40f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.204316 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90edbdf3-b435-459f-9a74-3f9ea9ace40f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "90edbdf3-b435-459f-9a74-3f9ea9ace40f" (UID: "90edbdf3-b435-459f-9a74-3f9ea9ace40f"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.206897 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90edbdf3-b435-459f-9a74-3f9ea9ace40f-config\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.206929 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rp2lz\" (UniqueName: \"kubernetes.io/projected/90edbdf3-b435-459f-9a74-3f9ea9ace40f-kube-api-access-rp2lz\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.206939 4651 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/90edbdf3-b435-459f-9a74-3f9ea9ace40f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.206948 4651 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/90edbdf3-b435-459f-9a74-3f9ea9ace40f-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.206957 4651 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/90edbdf3-b435-459f-9a74-3f9ea9ace40f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.206967 4651 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/90edbdf3-b435-459f-9a74-3f9ea9ace40f-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.214295 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-ctfxs" event={"ID":"96c528a9-d9c6-4eec-b63f-5bba189744ae","Type":"ContainerStarted","Data":"9b3929e4a24444e7ad80fa4b08644d48ced2c503be1cd6e36a2ae7c2b6ad7b47"} Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.218762 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"16b0d24a-e647-4381-9f03-9b48c34ba52f","Type":"ContainerStarted","Data":"f323265d37b6f01bd3b83da9590c0d981a9001e7f7522dd167e346b2ed31a8bc"} Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.220506 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-9rf4f" event={"ID":"261ffa67-4305-4260-903d-93b8af576721","Type":"ContainerDied","Data":"e7b03273c6c46ec518ef5af2fb012a499e1985e542ef18ecee3ccb7d8b86cb91"} Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.220527 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e7b03273c6c46ec518ef5af2fb012a499e1985e542ef18ecee3ccb7d8b86cb91" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.220567 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-9rf4f" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.228273 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-chq8s" event={"ID":"4f425c3a-376b-4ba1-8066-96b2d1f21698","Type":"ContainerStarted","Data":"9ce0c8b78d59e4cc58fe7fcc774cd7271869fa41a4e09f7e1fa4d0c2a13fd3fd"} Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.230806 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-ctfxs" podStartSLOduration=2.65572083 podStartE2EDuration="43.230791102s" podCreationTimestamp="2025-10-11 05:07:36 +0000 UTC" firstStartedPulling="2025-10-11 05:07:38.022934748 +0000 UTC m=+978.919167544" lastFinishedPulling="2025-10-11 05:08:18.59800502 +0000 UTC m=+1019.494237816" observedRunningTime="2025-10-11 05:08:19.228987477 +0000 UTC m=+1020.125220293" watchObservedRunningTime="2025-10-11 05:08:19.230791102 +0000 UTC m=+1020.127023898" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.239092 4651 generic.go:334] "Generic (PLEG): container finished" podID="90edbdf3-b435-459f-9a74-3f9ea9ace40f" containerID="07cb7d8f88667b10b54ad08748c84e647f86e6063360be6a1be28033d4f359f2" exitCode=0 Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.239180 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-t96wz" event={"ID":"90edbdf3-b435-459f-9a74-3f9ea9ace40f","Type":"ContainerDied","Data":"07cb7d8f88667b10b54ad08748c84e647f86e6063360be6a1be28033d4f359f2"} Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.239212 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-t96wz" event={"ID":"90edbdf3-b435-459f-9a74-3f9ea9ace40f","Type":"ContainerDied","Data":"3e7e41767dc31430b727bb45ff706b3bafecd52a1e2f81bbb0fa3d4dd063d673"} Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.239228 4651 scope.go:117] "RemoveContainer" containerID="07cb7d8f88667b10b54ad08748c84e647f86e6063360be6a1be28033d4f359f2" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.239354 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-t96wz" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.247751 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6856c774b5-fq9r6" event={"ID":"301b63d9-53a7-49b2-9d71-2b2bf854de89","Type":"ContainerStarted","Data":"ead1b8581255b1cdb21507af11dbb762e52a8d8d27e58c5f4118a16976cc5adc"} Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.249811 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-6856c774b5-fq9r6" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.252420 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-chq8s" podStartSLOduration=2.98155853 podStartE2EDuration="43.252403092s" podCreationTimestamp="2025-10-11 05:07:36 +0000 UTC" firstStartedPulling="2025-10-11 05:07:38.518252221 +0000 UTC m=+979.414485017" lastFinishedPulling="2025-10-11 05:08:18.789096783 +0000 UTC m=+1019.685329579" observedRunningTime="2025-10-11 05:08:19.245891807 +0000 UTC m=+1020.142124603" watchObservedRunningTime="2025-10-11 05:08:19.252403092 +0000 UTC m=+1020.148635888" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.266437 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.269918 4651 scope.go:117] "RemoveContainer" containerID="ab365c1772db9f96df995b71f01fde27590241651930e87f697505025de79fc4" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.291229 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-6856c774b5-fq9r6" podStartSLOduration=9.29120269 podStartE2EDuration="9.29120269s" podCreationTimestamp="2025-10-11 05:08:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:08:19.276644259 +0000 UTC m=+1020.172877055" watchObservedRunningTime="2025-10-11 05:08:19.29120269 +0000 UTC m=+1020.187435486" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.298550 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-t96wz"] Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.305018 4651 scope.go:117] "RemoveContainer" containerID="07cb7d8f88667b10b54ad08748c84e647f86e6063360be6a1be28033d4f359f2" Oct 11 05:08:19 crc kubenswrapper[4651]: E1011 05:08:19.305420 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07cb7d8f88667b10b54ad08748c84e647f86e6063360be6a1be28033d4f359f2\": container with ID starting with 07cb7d8f88667b10b54ad08748c84e647f86e6063360be6a1be28033d4f359f2 not found: ID does not exist" containerID="07cb7d8f88667b10b54ad08748c84e647f86e6063360be6a1be28033d4f359f2" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.305446 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07cb7d8f88667b10b54ad08748c84e647f86e6063360be6a1be28033d4f359f2"} err="failed to get container status \"07cb7d8f88667b10b54ad08748c84e647f86e6063360be6a1be28033d4f359f2\": rpc error: code = NotFound desc = could not find container \"07cb7d8f88667b10b54ad08748c84e647f86e6063360be6a1be28033d4f359f2\": container with ID starting with 07cb7d8f88667b10b54ad08748c84e647f86e6063360be6a1be28033d4f359f2 not found: ID does not exist" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.305466 4651 
scope.go:117] "RemoveContainer" containerID="ab365c1772db9f96df995b71f01fde27590241651930e87f697505025de79fc4" Oct 11 05:08:19 crc kubenswrapper[4651]: E1011 05:08:19.306015 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab365c1772db9f96df995b71f01fde27590241651930e87f697505025de79fc4\": container with ID starting with ab365c1772db9f96df995b71f01fde27590241651930e87f697505025de79fc4 not found: ID does not exist" containerID="ab365c1772db9f96df995b71f01fde27590241651930e87f697505025de79fc4" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.306076 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab365c1772db9f96df995b71f01fde27590241651930e87f697505025de79fc4"} err="failed to get container status \"ab365c1772db9f96df995b71f01fde27590241651930e87f697505025de79fc4\": rpc error: code = NotFound desc = could not find container \"ab365c1772db9f96df995b71f01fde27590241651930e87f697505025de79fc4\": container with ID starting with ab365c1772db9f96df995b71f01fde27590241651930e87f697505025de79fc4 not found: ID does not exist" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.306260 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-t96wz"] Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.621298 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-5dbc5c6b84-lhwcw"] Oct 11 05:08:19 crc kubenswrapper[4651]: E1011 05:08:19.621978 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="261ffa67-4305-4260-903d-93b8af576721" containerName="keystone-bootstrap" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.621992 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="261ffa67-4305-4260-903d-93b8af576721" containerName="keystone-bootstrap" Oct 11 05:08:19 crc kubenswrapper[4651]: E1011 05:08:19.622004 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90edbdf3-b435-459f-9a74-3f9ea9ace40f" containerName="init" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.622009 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="90edbdf3-b435-459f-9a74-3f9ea9ace40f" containerName="init" Oct 11 05:08:19 crc kubenswrapper[4651]: E1011 05:08:19.622036 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90edbdf3-b435-459f-9a74-3f9ea9ace40f" containerName="dnsmasq-dns" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.622042 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="90edbdf3-b435-459f-9a74-3f9ea9ace40f" containerName="dnsmasq-dns" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.622219 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="90edbdf3-b435-459f-9a74-3f9ea9ace40f" containerName="dnsmasq-dns" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.622229 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="261ffa67-4305-4260-903d-93b8af576721" containerName="keystone-bootstrap" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.622835 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-5dbc5c6b84-lhwcw" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.630262 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5dbc5c6b84-lhwcw"] Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.630286 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.630520 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.630629 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-2zq9v" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.630674 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.630844 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.631082 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.715903 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89ac3499-9018-4545-9e5f-f6eda0d14302-config-data\") pod \"keystone-5dbc5c6b84-lhwcw\" (UID: \"89ac3499-9018-4545-9e5f-f6eda0d14302\") " pod="openstack/keystone-5dbc5c6b84-lhwcw" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.715960 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/89ac3499-9018-4545-9e5f-f6eda0d14302-credential-keys\") pod \"keystone-5dbc5c6b84-lhwcw\" (UID: \"89ac3499-9018-4545-9e5f-f6eda0d14302\") " pod="openstack/keystone-5dbc5c6b84-lhwcw" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.716001 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89ac3499-9018-4545-9e5f-f6eda0d14302-scripts\") pod \"keystone-5dbc5c6b84-lhwcw\" (UID: \"89ac3499-9018-4545-9e5f-f6eda0d14302\") " pod="openstack/keystone-5dbc5c6b84-lhwcw" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.716031 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gqvpf\" (UniqueName: \"kubernetes.io/projected/89ac3499-9018-4545-9e5f-f6eda0d14302-kube-api-access-gqvpf\") pod \"keystone-5dbc5c6b84-lhwcw\" (UID: \"89ac3499-9018-4545-9e5f-f6eda0d14302\") " pod="openstack/keystone-5dbc5c6b84-lhwcw" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.716085 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/89ac3499-9018-4545-9e5f-f6eda0d14302-internal-tls-certs\") pod \"keystone-5dbc5c6b84-lhwcw\" (UID: \"89ac3499-9018-4545-9e5f-f6eda0d14302\") " pod="openstack/keystone-5dbc5c6b84-lhwcw" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.716108 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/89ac3499-9018-4545-9e5f-f6eda0d14302-fernet-keys\") pod \"keystone-5dbc5c6b84-lhwcw\" (UID: 
\"89ac3499-9018-4545-9e5f-f6eda0d14302\") " pod="openstack/keystone-5dbc5c6b84-lhwcw" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.716137 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89ac3499-9018-4545-9e5f-f6eda0d14302-combined-ca-bundle\") pod \"keystone-5dbc5c6b84-lhwcw\" (UID: \"89ac3499-9018-4545-9e5f-f6eda0d14302\") " pod="openstack/keystone-5dbc5c6b84-lhwcw" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.716153 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/89ac3499-9018-4545-9e5f-f6eda0d14302-public-tls-certs\") pod \"keystone-5dbc5c6b84-lhwcw\" (UID: \"89ac3499-9018-4545-9e5f-f6eda0d14302\") " pod="openstack/keystone-5dbc5c6b84-lhwcw" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.817551 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/89ac3499-9018-4545-9e5f-f6eda0d14302-internal-tls-certs\") pod \"keystone-5dbc5c6b84-lhwcw\" (UID: \"89ac3499-9018-4545-9e5f-f6eda0d14302\") " pod="openstack/keystone-5dbc5c6b84-lhwcw" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.817684 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/89ac3499-9018-4545-9e5f-f6eda0d14302-fernet-keys\") pod \"keystone-5dbc5c6b84-lhwcw\" (UID: \"89ac3499-9018-4545-9e5f-f6eda0d14302\") " pod="openstack/keystone-5dbc5c6b84-lhwcw" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.817749 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89ac3499-9018-4545-9e5f-f6eda0d14302-combined-ca-bundle\") pod \"keystone-5dbc5c6b84-lhwcw\" (UID: \"89ac3499-9018-4545-9e5f-f6eda0d14302\") " pod="openstack/keystone-5dbc5c6b84-lhwcw" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.817864 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/89ac3499-9018-4545-9e5f-f6eda0d14302-public-tls-certs\") pod \"keystone-5dbc5c6b84-lhwcw\" (UID: \"89ac3499-9018-4545-9e5f-f6eda0d14302\") " pod="openstack/keystone-5dbc5c6b84-lhwcw" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.818004 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89ac3499-9018-4545-9e5f-f6eda0d14302-config-data\") pod \"keystone-5dbc5c6b84-lhwcw\" (UID: \"89ac3499-9018-4545-9e5f-f6eda0d14302\") " pod="openstack/keystone-5dbc5c6b84-lhwcw" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.818090 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/89ac3499-9018-4545-9e5f-f6eda0d14302-credential-keys\") pod \"keystone-5dbc5c6b84-lhwcw\" (UID: \"89ac3499-9018-4545-9e5f-f6eda0d14302\") " pod="openstack/keystone-5dbc5c6b84-lhwcw" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.818159 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89ac3499-9018-4545-9e5f-f6eda0d14302-scripts\") pod \"keystone-5dbc5c6b84-lhwcw\" (UID: \"89ac3499-9018-4545-9e5f-f6eda0d14302\") " pod="openstack/keystone-5dbc5c6b84-lhwcw" 
Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.818237 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gqvpf\" (UniqueName: \"kubernetes.io/projected/89ac3499-9018-4545-9e5f-f6eda0d14302-kube-api-access-gqvpf\") pod \"keystone-5dbc5c6b84-lhwcw\" (UID: \"89ac3499-9018-4545-9e5f-f6eda0d14302\") " pod="openstack/keystone-5dbc5c6b84-lhwcw" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.820999 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.821283 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.821478 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.821649 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.821928 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.827326 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89ac3499-9018-4545-9e5f-f6eda0d14302-combined-ca-bundle\") pod \"keystone-5dbc5c6b84-lhwcw\" (UID: \"89ac3499-9018-4545-9e5f-f6eda0d14302\") " pod="openstack/keystone-5dbc5c6b84-lhwcw" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.831709 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/89ac3499-9018-4545-9e5f-f6eda0d14302-internal-tls-certs\") pod \"keystone-5dbc5c6b84-lhwcw\" (UID: \"89ac3499-9018-4545-9e5f-f6eda0d14302\") " pod="openstack/keystone-5dbc5c6b84-lhwcw" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.834058 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/89ac3499-9018-4545-9e5f-f6eda0d14302-public-tls-certs\") pod \"keystone-5dbc5c6b84-lhwcw\" (UID: \"89ac3499-9018-4545-9e5f-f6eda0d14302\") " pod="openstack/keystone-5dbc5c6b84-lhwcw" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.834064 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/89ac3499-9018-4545-9e5f-f6eda0d14302-fernet-keys\") pod \"keystone-5dbc5c6b84-lhwcw\" (UID: \"89ac3499-9018-4545-9e5f-f6eda0d14302\") " pod="openstack/keystone-5dbc5c6b84-lhwcw" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.836882 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gqvpf\" (UniqueName: \"kubernetes.io/projected/89ac3499-9018-4545-9e5f-f6eda0d14302-kube-api-access-gqvpf\") pod \"keystone-5dbc5c6b84-lhwcw\" (UID: \"89ac3499-9018-4545-9e5f-f6eda0d14302\") " pod="openstack/keystone-5dbc5c6b84-lhwcw" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.837606 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89ac3499-9018-4545-9e5f-f6eda0d14302-config-data\") pod \"keystone-5dbc5c6b84-lhwcw\" (UID: \"89ac3499-9018-4545-9e5f-f6eda0d14302\") " pod="openstack/keystone-5dbc5c6b84-lhwcw" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.838523 4651 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89ac3499-9018-4545-9e5f-f6eda0d14302-scripts\") pod \"keystone-5dbc5c6b84-lhwcw\" (UID: \"89ac3499-9018-4545-9e5f-f6eda0d14302\") " pod="openstack/keystone-5dbc5c6b84-lhwcw" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.846995 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/89ac3499-9018-4545-9e5f-f6eda0d14302-credential-keys\") pod \"keystone-5dbc5c6b84-lhwcw\" (UID: \"89ac3499-9018-4545-9e5f-f6eda0d14302\") " pod="openstack/keystone-5dbc5c6b84-lhwcw" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.895971 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="90edbdf3-b435-459f-9a74-3f9ea9ace40f" path="/var/lib/kubelet/pods/90edbdf3-b435-459f-9a74-3f9ea9ace40f/volumes" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.981343 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-2zq9v" Oct 11 05:08:19 crc kubenswrapper[4651]: I1011 05:08:19.989470 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5dbc5c6b84-lhwcw" Oct 11 05:08:20 crc kubenswrapper[4651]: I1011 05:08:20.270318 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5c04eb41-eb14-4687-a0fa-56f07612da15","Type":"ContainerStarted","Data":"dc526a3f9f7a83761b341c698b7ac5c3eea9e0a70fbc06131f5b4708b2e6dd8d"} Oct 11 05:08:20 crc kubenswrapper[4651]: I1011 05:08:20.270360 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5c04eb41-eb14-4687-a0fa-56f07612da15","Type":"ContainerStarted","Data":"0feb0045d385ab90a5341d4e00169b66f6ebeafe187fa06edb2be9a02a78e5c7"} Oct 11 05:08:20 crc kubenswrapper[4651]: I1011 05:08:20.272875 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6d67b157-9258-41f2-b942-bc5a54f1ea21","Type":"ContainerStarted","Data":"ad867022b00e0c78ace5afa4d483a112cefb1a74135162c9bbe0403ebcac754a"} Oct 11 05:08:20 crc kubenswrapper[4651]: I1011 05:08:20.301367 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=10.301348414 podStartE2EDuration="10.301348414s" podCreationTimestamp="2025-10-11 05:08:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:08:20.297729992 +0000 UTC m=+1021.193962788" watchObservedRunningTime="2025-10-11 05:08:20.301348414 +0000 UTC m=+1021.197581210" Oct 11 05:08:20 crc kubenswrapper[4651]: I1011 05:08:20.511579 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5dbc5c6b84-lhwcw"] Oct 11 05:08:20 crc kubenswrapper[4651]: I1011 05:08:20.522336 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 11 05:08:20 crc kubenswrapper[4651]: I1011 05:08:20.522479 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 11 05:08:20 crc kubenswrapper[4651]: I1011 05:08:20.603110 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 11 05:08:20 crc kubenswrapper[4651]: I1011 
05:08:20.613547 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 11 05:08:21 crc kubenswrapper[4651]: I1011 05:08:21.288451 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5c04eb41-eb14-4687-a0fa-56f07612da15","Type":"ContainerStarted","Data":"1510c5a0e22592e06028411eccbbe08cd69f4981baf82da22394ea597b956109"} Oct 11 05:08:21 crc kubenswrapper[4651]: I1011 05:08:21.290521 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5dbc5c6b84-lhwcw" event={"ID":"89ac3499-9018-4545-9e5f-f6eda0d14302","Type":"ContainerStarted","Data":"b4c5d6040c9b67fb7110e3c57e31e0e11d10f6779add04da30c0923e4f58285e"} Oct 11 05:08:21 crc kubenswrapper[4651]: I1011 05:08:21.290574 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5dbc5c6b84-lhwcw" event={"ID":"89ac3499-9018-4545-9e5f-f6eda0d14302","Type":"ContainerStarted","Data":"d2ed4bdcccf402b86449b791b8380112e3ad777b8d52b930d15fca14e7044c50"} Oct 11 05:08:21 crc kubenswrapper[4651]: I1011 05:08:21.300258 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 11 05:08:21 crc kubenswrapper[4651]: I1011 05:08:21.300894 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 11 05:08:21 crc kubenswrapper[4651]: I1011 05:08:21.300977 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-5dbc5c6b84-lhwcw" Oct 11 05:08:21 crc kubenswrapper[4651]: I1011 05:08:21.334086 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.334073883 podStartE2EDuration="4.334073883s" podCreationTimestamp="2025-10-11 05:08:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:08:21.331094797 +0000 UTC m=+1022.227327593" watchObservedRunningTime="2025-10-11 05:08:21.334073883 +0000 UTC m=+1022.230306679" Oct 11 05:08:21 crc kubenswrapper[4651]: I1011 05:08:21.352500 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-5dbc5c6b84-lhwcw" podStartSLOduration=2.352484611 podStartE2EDuration="2.352484611s" podCreationTimestamp="2025-10-11 05:08:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:08:21.350895811 +0000 UTC m=+1022.247128627" watchObservedRunningTime="2025-10-11 05:08:21.352484611 +0000 UTC m=+1022.248717397" Oct 11 05:08:22 crc kubenswrapper[4651]: I1011 05:08:22.304151 4651 generic.go:334] "Generic (PLEG): container finished" podID="96c528a9-d9c6-4eec-b63f-5bba189744ae" containerID="9b3929e4a24444e7ad80fa4b08644d48ced2c503be1cd6e36a2ae7c2b6ad7b47" exitCode=0 Oct 11 05:08:22 crc kubenswrapper[4651]: I1011 05:08:22.304450 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-ctfxs" event={"ID":"96c528a9-d9c6-4eec-b63f-5bba189744ae","Type":"ContainerDied","Data":"9b3929e4a24444e7ad80fa4b08644d48ced2c503be1cd6e36a2ae7c2b6ad7b47"} Oct 11 05:08:22 crc kubenswrapper[4651]: I1011 05:08:22.307239 4651 generic.go:334] "Generic (PLEG): container finished" podID="4f425c3a-376b-4ba1-8066-96b2d1f21698" containerID="9ce0c8b78d59e4cc58fe7fcc774cd7271869fa41a4e09f7e1fa4d0c2a13fd3fd" 
exitCode=0 Oct 11 05:08:22 crc kubenswrapper[4651]: I1011 05:08:22.307311 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-chq8s" event={"ID":"4f425c3a-376b-4ba1-8066-96b2d1f21698","Type":"ContainerDied","Data":"9ce0c8b78d59e4cc58fe7fcc774cd7271869fa41a4e09f7e1fa4d0c2a13fd3fd"} Oct 11 05:08:23 crc kubenswrapper[4651]: I1011 05:08:23.175285 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 11 05:08:26 crc kubenswrapper[4651]: I1011 05:08:26.195107 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-68976f6bc6-9jl66" podUID="93e6a5fd-218d-44c2-9bf9-2796b1ff0c33" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.150:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.150:8443: connect: connection refused" Oct 11 05:08:26 crc kubenswrapper[4651]: I1011 05:08:26.220788 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-chq8s" Oct 11 05:08:26 crc kubenswrapper[4651]: I1011 05:08:26.229153 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-ctfxs" Oct 11 05:08:26 crc kubenswrapper[4651]: I1011 05:08:26.337586 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5f7d84485b-zb5s7" podUID="a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.151:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.151:8443: connect: connection refused" Oct 11 05:08:26 crc kubenswrapper[4651]: I1011 05:08:26.347304 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-chq8s" event={"ID":"4f425c3a-376b-4ba1-8066-96b2d1f21698","Type":"ContainerDied","Data":"54246d689260572ea569e72fed4d6476787b1afba4cc68eaed8d65e0b0a33441"} Oct 11 05:08:26 crc kubenswrapper[4651]: I1011 05:08:26.347351 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="54246d689260572ea569e72fed4d6476787b1afba4cc68eaed8d65e0b0a33441" Oct 11 05:08:26 crc kubenswrapper[4651]: I1011 05:08:26.347408 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-chq8s" Oct 11 05:08:26 crc kubenswrapper[4651]: I1011 05:08:26.350003 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-ctfxs" event={"ID":"96c528a9-d9c6-4eec-b63f-5bba189744ae","Type":"ContainerDied","Data":"6a883eefff09ee56dab9b3ee93690f306d8748c8c371d1f0184ee0f0f274f676"} Oct 11 05:08:26 crc kubenswrapper[4651]: I1011 05:08:26.350035 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6a883eefff09ee56dab9b3ee93690f306d8748c8c371d1f0184ee0f0f274f676" Oct 11 05:08:26 crc kubenswrapper[4651]: I1011 05:08:26.350083 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-ctfxs" Oct 11 05:08:26 crc kubenswrapper[4651]: I1011 05:08:26.358101 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96c528a9-d9c6-4eec-b63f-5bba189744ae-logs\") pod \"96c528a9-d9c6-4eec-b63f-5bba189744ae\" (UID: \"96c528a9-d9c6-4eec-b63f-5bba189744ae\") " Oct 11 05:08:26 crc kubenswrapper[4651]: I1011 05:08:26.358187 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kqcc7\" (UniqueName: \"kubernetes.io/projected/96c528a9-d9c6-4eec-b63f-5bba189744ae-kube-api-access-kqcc7\") pod \"96c528a9-d9c6-4eec-b63f-5bba189744ae\" (UID: \"96c528a9-d9c6-4eec-b63f-5bba189744ae\") " Oct 11 05:08:26 crc kubenswrapper[4651]: I1011 05:08:26.358217 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4f425c3a-376b-4ba1-8066-96b2d1f21698-db-sync-config-data\") pod \"4f425c3a-376b-4ba1-8066-96b2d1f21698\" (UID: \"4f425c3a-376b-4ba1-8066-96b2d1f21698\") " Oct 11 05:08:26 crc kubenswrapper[4651]: I1011 05:08:26.358276 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96c528a9-d9c6-4eec-b63f-5bba189744ae-scripts\") pod \"96c528a9-d9c6-4eec-b63f-5bba189744ae\" (UID: \"96c528a9-d9c6-4eec-b63f-5bba189744ae\") " Oct 11 05:08:26 crc kubenswrapper[4651]: I1011 05:08:26.358317 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96c528a9-d9c6-4eec-b63f-5bba189744ae-combined-ca-bundle\") pod \"96c528a9-d9c6-4eec-b63f-5bba189744ae\" (UID: \"96c528a9-d9c6-4eec-b63f-5bba189744ae\") " Oct 11 05:08:26 crc kubenswrapper[4651]: I1011 05:08:26.358421 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f425c3a-376b-4ba1-8066-96b2d1f21698-combined-ca-bundle\") pod \"4f425c3a-376b-4ba1-8066-96b2d1f21698\" (UID: \"4f425c3a-376b-4ba1-8066-96b2d1f21698\") " Oct 11 05:08:26 crc kubenswrapper[4651]: I1011 05:08:26.358451 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96c528a9-d9c6-4eec-b63f-5bba189744ae-config-data\") pod \"96c528a9-d9c6-4eec-b63f-5bba189744ae\" (UID: \"96c528a9-d9c6-4eec-b63f-5bba189744ae\") " Oct 11 05:08:26 crc kubenswrapper[4651]: I1011 05:08:26.358498 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tcf7n\" (UniqueName: \"kubernetes.io/projected/4f425c3a-376b-4ba1-8066-96b2d1f21698-kube-api-access-tcf7n\") pod \"4f425c3a-376b-4ba1-8066-96b2d1f21698\" (UID: \"4f425c3a-376b-4ba1-8066-96b2d1f21698\") " Oct 11 05:08:26 crc kubenswrapper[4651]: I1011 05:08:26.365111 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/96c528a9-d9c6-4eec-b63f-5bba189744ae-logs" (OuterVolumeSpecName: "logs") pod "96c528a9-d9c6-4eec-b63f-5bba189744ae" (UID: "96c528a9-d9c6-4eec-b63f-5bba189744ae"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:08:26 crc kubenswrapper[4651]: I1011 05:08:26.366407 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f425c3a-376b-4ba1-8066-96b2d1f21698-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "4f425c3a-376b-4ba1-8066-96b2d1f21698" (UID: "4f425c3a-376b-4ba1-8066-96b2d1f21698"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:26 crc kubenswrapper[4651]: I1011 05:08:26.367743 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96c528a9-d9c6-4eec-b63f-5bba189744ae-kube-api-access-kqcc7" (OuterVolumeSpecName: "kube-api-access-kqcc7") pod "96c528a9-d9c6-4eec-b63f-5bba189744ae" (UID: "96c528a9-d9c6-4eec-b63f-5bba189744ae"). InnerVolumeSpecName "kube-api-access-kqcc7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:08:26 crc kubenswrapper[4651]: I1011 05:08:26.367852 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f425c3a-376b-4ba1-8066-96b2d1f21698-kube-api-access-tcf7n" (OuterVolumeSpecName: "kube-api-access-tcf7n") pod "4f425c3a-376b-4ba1-8066-96b2d1f21698" (UID: "4f425c3a-376b-4ba1-8066-96b2d1f21698"). InnerVolumeSpecName "kube-api-access-tcf7n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:08:26 crc kubenswrapper[4651]: I1011 05:08:26.382548 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96c528a9-d9c6-4eec-b63f-5bba189744ae-scripts" (OuterVolumeSpecName: "scripts") pod "96c528a9-d9c6-4eec-b63f-5bba189744ae" (UID: "96c528a9-d9c6-4eec-b63f-5bba189744ae"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:26 crc kubenswrapper[4651]: I1011 05:08:26.398532 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96c528a9-d9c6-4eec-b63f-5bba189744ae-config-data" (OuterVolumeSpecName: "config-data") pod "96c528a9-d9c6-4eec-b63f-5bba189744ae" (UID: "96c528a9-d9c6-4eec-b63f-5bba189744ae"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:26 crc kubenswrapper[4651]: I1011 05:08:26.401557 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96c528a9-d9c6-4eec-b63f-5bba189744ae-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "96c528a9-d9c6-4eec-b63f-5bba189744ae" (UID: "96c528a9-d9c6-4eec-b63f-5bba189744ae"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:26 crc kubenswrapper[4651]: I1011 05:08:26.402242 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f425c3a-376b-4ba1-8066-96b2d1f21698-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4f425c3a-376b-4ba1-8066-96b2d1f21698" (UID: "4f425c3a-376b-4ba1-8066-96b2d1f21698"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:26 crc kubenswrapper[4651]: I1011 05:08:26.460666 4651 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96c528a9-d9c6-4eec-b63f-5bba189744ae-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:26 crc kubenswrapper[4651]: I1011 05:08:26.460706 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96c528a9-d9c6-4eec-b63f-5bba189744ae-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:26 crc kubenswrapper[4651]: I1011 05:08:26.460717 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f425c3a-376b-4ba1-8066-96b2d1f21698-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:26 crc kubenswrapper[4651]: I1011 05:08:26.460725 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96c528a9-d9c6-4eec-b63f-5bba189744ae-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:26 crc kubenswrapper[4651]: I1011 05:08:26.460733 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tcf7n\" (UniqueName: \"kubernetes.io/projected/4f425c3a-376b-4ba1-8066-96b2d1f21698-kube-api-access-tcf7n\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:26 crc kubenswrapper[4651]: I1011 05:08:26.460744 4651 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96c528a9-d9c6-4eec-b63f-5bba189744ae-logs\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:26 crc kubenswrapper[4651]: I1011 05:08:26.460752 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kqcc7\" (UniqueName: \"kubernetes.io/projected/96c528a9-d9c6-4eec-b63f-5bba189744ae-kube-api-access-kqcc7\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:26 crc kubenswrapper[4651]: I1011 05:08:26.460760 4651 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4f425c3a-376b-4ba1-8066-96b2d1f21698-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:27 crc kubenswrapper[4651]: E1011 05:08:27.237659 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="16b0d24a-e647-4381-9f03-9b48c34ba52f" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.362312 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"16b0d24a-e647-4381-9f03-9b48c34ba52f","Type":"ContainerStarted","Data":"87e0554092fce6ae4459d8cc5e40b1f6a8ab63cf1bd08c794cba430853fb94c3"} Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.362481 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="16b0d24a-e647-4381-9f03-9b48c34ba52f" containerName="ceilometer-notification-agent" containerID="cri-o://9694fee13b033094ce30c5aac1dfc12712a8e18e5878f97c2f379916d2059414" gracePeriod=30 Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.362722 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.362964 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" 
podUID="16b0d24a-e647-4381-9f03-9b48c34ba52f" containerName="proxy-httpd" containerID="cri-o://87e0554092fce6ae4459d8cc5e40b1f6a8ab63cf1bd08c794cba430853fb94c3" gracePeriod=30 Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.363013 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="16b0d24a-e647-4381-9f03-9b48c34ba52f" containerName="sg-core" containerID="cri-o://f323265d37b6f01bd3b83da9590c0d981a9001e7f7522dd167e346b2ed31a8bc" gracePeriod=30 Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.458398 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-74cbfd888-nqwlq"] Oct 11 05:08:27 crc kubenswrapper[4651]: E1011 05:08:27.458973 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f425c3a-376b-4ba1-8066-96b2d1f21698" containerName="barbican-db-sync" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.459005 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f425c3a-376b-4ba1-8066-96b2d1f21698" containerName="barbican-db-sync" Oct 11 05:08:27 crc kubenswrapper[4651]: E1011 05:08:27.459035 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96c528a9-d9c6-4eec-b63f-5bba189744ae" containerName="placement-db-sync" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.459049 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="96c528a9-d9c6-4eec-b63f-5bba189744ae" containerName="placement-db-sync" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.459284 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f425c3a-376b-4ba1-8066-96b2d1f21698" containerName="barbican-db-sync" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.459313 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="96c528a9-d9c6-4eec-b63f-5bba189744ae" containerName="placement-db-sync" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.468046 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-74cbfd888-nqwlq" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.479324 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.479528 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.481468 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.487194 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-rzsbm" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.487545 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.488237 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-74cbfd888-nqwlq"] Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.494016 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c777922e-553b-44ec-84c1-4b3f6644701b-combined-ca-bundle\") pod \"placement-74cbfd888-nqwlq\" (UID: \"c777922e-553b-44ec-84c1-4b3f6644701b\") " pod="openstack/placement-74cbfd888-nqwlq" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.494214 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c777922e-553b-44ec-84c1-4b3f6644701b-config-data\") pod \"placement-74cbfd888-nqwlq\" (UID: \"c777922e-553b-44ec-84c1-4b3f6644701b\") " pod="openstack/placement-74cbfd888-nqwlq" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.494310 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c777922e-553b-44ec-84c1-4b3f6644701b-internal-tls-certs\") pod \"placement-74cbfd888-nqwlq\" (UID: \"c777922e-553b-44ec-84c1-4b3f6644701b\") " pod="openstack/placement-74cbfd888-nqwlq" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.494404 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c777922e-553b-44ec-84c1-4b3f6644701b-logs\") pod \"placement-74cbfd888-nqwlq\" (UID: \"c777922e-553b-44ec-84c1-4b3f6644701b\") " pod="openstack/placement-74cbfd888-nqwlq" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.494481 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c777922e-553b-44ec-84c1-4b3f6644701b-scripts\") pod \"placement-74cbfd888-nqwlq\" (UID: \"c777922e-553b-44ec-84c1-4b3f6644701b\") " pod="openstack/placement-74cbfd888-nqwlq" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.494568 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c777922e-553b-44ec-84c1-4b3f6644701b-public-tls-certs\") pod \"placement-74cbfd888-nqwlq\" (UID: \"c777922e-553b-44ec-84c1-4b3f6644701b\") " pod="openstack/placement-74cbfd888-nqwlq" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.494650 4651 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkqs9\" (UniqueName: \"kubernetes.io/projected/c777922e-553b-44ec-84c1-4b3f6644701b-kube-api-access-lkqs9\") pod \"placement-74cbfd888-nqwlq\" (UID: \"c777922e-553b-44ec-84c1-4b3f6644701b\") " pod="openstack/placement-74cbfd888-nqwlq" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.583978 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-6b84468647-bq8d6"] Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.586124 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-6b84468647-bq8d6" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.589246 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-4bzxx" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.589563 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.602503 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.614433 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c777922e-553b-44ec-84c1-4b3f6644701b-public-tls-certs\") pod \"placement-74cbfd888-nqwlq\" (UID: \"c777922e-553b-44ec-84c1-4b3f6644701b\") " pod="openstack/placement-74cbfd888-nqwlq" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.614533 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkqs9\" (UniqueName: \"kubernetes.io/projected/c777922e-553b-44ec-84c1-4b3f6644701b-kube-api-access-lkqs9\") pod \"placement-74cbfd888-nqwlq\" (UID: \"c777922e-553b-44ec-84c1-4b3f6644701b\") " pod="openstack/placement-74cbfd888-nqwlq" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.614637 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c777922e-553b-44ec-84c1-4b3f6644701b-combined-ca-bundle\") pod \"placement-74cbfd888-nqwlq\" (UID: \"c777922e-553b-44ec-84c1-4b3f6644701b\") " pod="openstack/placement-74cbfd888-nqwlq" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.614656 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a7fc0ac-3c48-4cd1-9cbd-78eca125768d-combined-ca-bundle\") pod \"barbican-worker-6b84468647-bq8d6\" (UID: \"0a7fc0ac-3c48-4cd1-9cbd-78eca125768d\") " pod="openstack/barbican-worker-6b84468647-bq8d6" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.614706 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a7fc0ac-3c48-4cd1-9cbd-78eca125768d-config-data\") pod \"barbican-worker-6b84468647-bq8d6\" (UID: \"0a7fc0ac-3c48-4cd1-9cbd-78eca125768d\") " pod="openstack/barbican-worker-6b84468647-bq8d6" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.614728 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0a7fc0ac-3c48-4cd1-9cbd-78eca125768d-logs\") pod \"barbican-worker-6b84468647-bq8d6\" (UID: \"0a7fc0ac-3c48-4cd1-9cbd-78eca125768d\") " 
pod="openstack/barbican-worker-6b84468647-bq8d6" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.614782 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c777922e-553b-44ec-84c1-4b3f6644701b-config-data\") pod \"placement-74cbfd888-nqwlq\" (UID: \"c777922e-553b-44ec-84c1-4b3f6644701b\") " pod="openstack/placement-74cbfd888-nqwlq" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.614803 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c777922e-553b-44ec-84c1-4b3f6644701b-internal-tls-certs\") pod \"placement-74cbfd888-nqwlq\" (UID: \"c777922e-553b-44ec-84c1-4b3f6644701b\") " pod="openstack/placement-74cbfd888-nqwlq" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.614843 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5lh5\" (UniqueName: \"kubernetes.io/projected/0a7fc0ac-3c48-4cd1-9cbd-78eca125768d-kube-api-access-s5lh5\") pod \"barbican-worker-6b84468647-bq8d6\" (UID: \"0a7fc0ac-3c48-4cd1-9cbd-78eca125768d\") " pod="openstack/barbican-worker-6b84468647-bq8d6" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.614899 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0a7fc0ac-3c48-4cd1-9cbd-78eca125768d-config-data-custom\") pod \"barbican-worker-6b84468647-bq8d6\" (UID: \"0a7fc0ac-3c48-4cd1-9cbd-78eca125768d\") " pod="openstack/barbican-worker-6b84468647-bq8d6" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.614925 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c777922e-553b-44ec-84c1-4b3f6644701b-logs\") pod \"placement-74cbfd888-nqwlq\" (UID: \"c777922e-553b-44ec-84c1-4b3f6644701b\") " pod="openstack/placement-74cbfd888-nqwlq" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.614959 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c777922e-553b-44ec-84c1-4b3f6644701b-scripts\") pod \"placement-74cbfd888-nqwlq\" (UID: \"c777922e-553b-44ec-84c1-4b3f6644701b\") " pod="openstack/placement-74cbfd888-nqwlq" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.615044 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6b84468647-bq8d6"] Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.623741 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c777922e-553b-44ec-84c1-4b3f6644701b-logs\") pod \"placement-74cbfd888-nqwlq\" (UID: \"c777922e-553b-44ec-84c1-4b3f6644701b\") " pod="openstack/placement-74cbfd888-nqwlq" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.626831 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c777922e-553b-44ec-84c1-4b3f6644701b-combined-ca-bundle\") pod \"placement-74cbfd888-nqwlq\" (UID: \"c777922e-553b-44ec-84c1-4b3f6644701b\") " pod="openstack/placement-74cbfd888-nqwlq" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.627703 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c777922e-553b-44ec-84c1-4b3f6644701b-scripts\") pod 
\"placement-74cbfd888-nqwlq\" (UID: \"c777922e-553b-44ec-84c1-4b3f6644701b\") " pod="openstack/placement-74cbfd888-nqwlq" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.638099 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c777922e-553b-44ec-84c1-4b3f6644701b-config-data\") pod \"placement-74cbfd888-nqwlq\" (UID: \"c777922e-553b-44ec-84c1-4b3f6644701b\") " pod="openstack/placement-74cbfd888-nqwlq" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.638289 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.638333 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.651489 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c777922e-553b-44ec-84c1-4b3f6644701b-internal-tls-certs\") pod \"placement-74cbfd888-nqwlq\" (UID: \"c777922e-553b-44ec-84c1-4b3f6644701b\") " pod="openstack/placement-74cbfd888-nqwlq" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.652071 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c777922e-553b-44ec-84c1-4b3f6644701b-public-tls-certs\") pod \"placement-74cbfd888-nqwlq\" (UID: \"c777922e-553b-44ec-84c1-4b3f6644701b\") " pod="openstack/placement-74cbfd888-nqwlq" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.674900 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-65475785f8-ljsqt"] Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.676346 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-65475785f8-ljsqt" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.682997 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.732474 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d5d4c12-9a64-4b51-9613-7d8905d3367f-logs\") pod \"barbican-keystone-listener-65475785f8-ljsqt\" (UID: \"3d5d4c12-9a64-4b51-9613-7d8905d3367f\") " pod="openstack/barbican-keystone-listener-65475785f8-ljsqt" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.732576 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3d5d4c12-9a64-4b51-9613-7d8905d3367f-config-data-custom\") pod \"barbican-keystone-listener-65475785f8-ljsqt\" (UID: \"3d5d4c12-9a64-4b51-9613-7d8905d3367f\") " pod="openstack/barbican-keystone-listener-65475785f8-ljsqt" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.732642 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d5d4c12-9a64-4b51-9613-7d8905d3367f-config-data\") pod \"barbican-keystone-listener-65475785f8-ljsqt\" (UID: \"3d5d4c12-9a64-4b51-9613-7d8905d3367f\") " pod="openstack/barbican-keystone-listener-65475785f8-ljsqt" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.732670 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a7fc0ac-3c48-4cd1-9cbd-78eca125768d-combined-ca-bundle\") pod \"barbican-worker-6b84468647-bq8d6\" (UID: \"0a7fc0ac-3c48-4cd1-9cbd-78eca125768d\") " pod="openstack/barbican-worker-6b84468647-bq8d6" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.732727 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a7fc0ac-3c48-4cd1-9cbd-78eca125768d-config-data\") pod \"barbican-worker-6b84468647-bq8d6\" (UID: \"0a7fc0ac-3c48-4cd1-9cbd-78eca125768d\") " pod="openstack/barbican-worker-6b84468647-bq8d6" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.732758 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0a7fc0ac-3c48-4cd1-9cbd-78eca125768d-logs\") pod \"barbican-worker-6b84468647-bq8d6\" (UID: \"0a7fc0ac-3c48-4cd1-9cbd-78eca125768d\") " pod="openstack/barbican-worker-6b84468647-bq8d6" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.732859 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d5d4c12-9a64-4b51-9613-7d8905d3367f-combined-ca-bundle\") pod \"barbican-keystone-listener-65475785f8-ljsqt\" (UID: \"3d5d4c12-9a64-4b51-9613-7d8905d3367f\") " pod="openstack/barbican-keystone-listener-65475785f8-ljsqt" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.732889 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvfz8\" (UniqueName: \"kubernetes.io/projected/3d5d4c12-9a64-4b51-9613-7d8905d3367f-kube-api-access-lvfz8\") pod \"barbican-keystone-listener-65475785f8-ljsqt\" (UID: 
\"3d5d4c12-9a64-4b51-9613-7d8905d3367f\") " pod="openstack/barbican-keystone-listener-65475785f8-ljsqt" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.732929 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5lh5\" (UniqueName: \"kubernetes.io/projected/0a7fc0ac-3c48-4cd1-9cbd-78eca125768d-kube-api-access-s5lh5\") pod \"barbican-worker-6b84468647-bq8d6\" (UID: \"0a7fc0ac-3c48-4cd1-9cbd-78eca125768d\") " pod="openstack/barbican-worker-6b84468647-bq8d6" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.732979 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0a7fc0ac-3c48-4cd1-9cbd-78eca125768d-config-data-custom\") pod \"barbican-worker-6b84468647-bq8d6\" (UID: \"0a7fc0ac-3c48-4cd1-9cbd-78eca125768d\") " pod="openstack/barbican-worker-6b84468647-bq8d6" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.748064 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0a7fc0ac-3c48-4cd1-9cbd-78eca125768d-config-data-custom\") pod \"barbican-worker-6b84468647-bq8d6\" (UID: \"0a7fc0ac-3c48-4cd1-9cbd-78eca125768d\") " pod="openstack/barbican-worker-6b84468647-bq8d6" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.753776 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a7fc0ac-3c48-4cd1-9cbd-78eca125768d-combined-ca-bundle\") pod \"barbican-worker-6b84468647-bq8d6\" (UID: \"0a7fc0ac-3c48-4cd1-9cbd-78eca125768d\") " pod="openstack/barbican-worker-6b84468647-bq8d6" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.765529 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.783617 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0a7fc0ac-3c48-4cd1-9cbd-78eca125768d-logs\") pod \"barbican-worker-6b84468647-bq8d6\" (UID: \"0a7fc0ac-3c48-4cd1-9cbd-78eca125768d\") " pod="openstack/barbican-worker-6b84468647-bq8d6" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.784855 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkqs9\" (UniqueName: \"kubernetes.io/projected/c777922e-553b-44ec-84c1-4b3f6644701b-kube-api-access-lkqs9\") pod \"placement-74cbfd888-nqwlq\" (UID: \"c777922e-553b-44ec-84c1-4b3f6644701b\") " pod="openstack/placement-74cbfd888-nqwlq" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.784924 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a7fc0ac-3c48-4cd1-9cbd-78eca125768d-config-data\") pod \"barbican-worker-6b84468647-bq8d6\" (UID: \"0a7fc0ac-3c48-4cd1-9cbd-78eca125768d\") " pod="openstack/barbican-worker-6b84468647-bq8d6" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.790311 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5lh5\" (UniqueName: \"kubernetes.io/projected/0a7fc0ac-3c48-4cd1-9cbd-78eca125768d-kube-api-access-s5lh5\") pod \"barbican-worker-6b84468647-bq8d6\" (UID: \"0a7fc0ac-3c48-4cd1-9cbd-78eca125768d\") " pod="openstack/barbican-worker-6b84468647-bq8d6" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.806461 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="openstack/glance-default-internal-api-0" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.832357 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-65475785f8-ljsqt"] Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.835295 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d5d4c12-9a64-4b51-9613-7d8905d3367f-combined-ca-bundle\") pod \"barbican-keystone-listener-65475785f8-ljsqt\" (UID: \"3d5d4c12-9a64-4b51-9613-7d8905d3367f\") " pod="openstack/barbican-keystone-listener-65475785f8-ljsqt" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.835357 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvfz8\" (UniqueName: \"kubernetes.io/projected/3d5d4c12-9a64-4b51-9613-7d8905d3367f-kube-api-access-lvfz8\") pod \"barbican-keystone-listener-65475785f8-ljsqt\" (UID: \"3d5d4c12-9a64-4b51-9613-7d8905d3367f\") " pod="openstack/barbican-keystone-listener-65475785f8-ljsqt" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.836220 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d5d4c12-9a64-4b51-9613-7d8905d3367f-logs\") pod \"barbican-keystone-listener-65475785f8-ljsqt\" (UID: \"3d5d4c12-9a64-4b51-9613-7d8905d3367f\") " pod="openstack/barbican-keystone-listener-65475785f8-ljsqt" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.836259 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3d5d4c12-9a64-4b51-9613-7d8905d3367f-config-data-custom\") pod \"barbican-keystone-listener-65475785f8-ljsqt\" (UID: \"3d5d4c12-9a64-4b51-9613-7d8905d3367f\") " pod="openstack/barbican-keystone-listener-65475785f8-ljsqt" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.836313 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d5d4c12-9a64-4b51-9613-7d8905d3367f-config-data\") pod \"barbican-keystone-listener-65475785f8-ljsqt\" (UID: \"3d5d4c12-9a64-4b51-9613-7d8905d3367f\") " pod="openstack/barbican-keystone-listener-65475785f8-ljsqt" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.837292 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d5d4c12-9a64-4b51-9613-7d8905d3367f-logs\") pod \"barbican-keystone-listener-65475785f8-ljsqt\" (UID: \"3d5d4c12-9a64-4b51-9613-7d8905d3367f\") " pod="openstack/barbican-keystone-listener-65475785f8-ljsqt" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.843509 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d5d4c12-9a64-4b51-9613-7d8905d3367f-combined-ca-bundle\") pod \"barbican-keystone-listener-65475785f8-ljsqt\" (UID: \"3d5d4c12-9a64-4b51-9613-7d8905d3367f\") " pod="openstack/barbican-keystone-listener-65475785f8-ljsqt" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.847323 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d5d4c12-9a64-4b51-9613-7d8905d3367f-config-data\") pod \"barbican-keystone-listener-65475785f8-ljsqt\" (UID: \"3d5d4c12-9a64-4b51-9613-7d8905d3367f\") " pod="openstack/barbican-keystone-listener-65475785f8-ljsqt" Oct 11 
05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.852480 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3d5d4c12-9a64-4b51-9613-7d8905d3367f-config-data-custom\") pod \"barbican-keystone-listener-65475785f8-ljsqt\" (UID: \"3d5d4c12-9a64-4b51-9613-7d8905d3367f\") " pod="openstack/barbican-keystone-listener-65475785f8-ljsqt" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.858283 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvfz8\" (UniqueName: \"kubernetes.io/projected/3d5d4c12-9a64-4b51-9613-7d8905d3367f-kube-api-access-lvfz8\") pod \"barbican-keystone-listener-65475785f8-ljsqt\" (UID: \"3d5d4c12-9a64-4b51-9613-7d8905d3367f\") " pod="openstack/barbican-keystone-listener-65475785f8-ljsqt" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.859880 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-74cbfd888-nqwlq" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.921974 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-nmtk4"] Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.925122 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-6f5cf48bd-n6csr"] Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.925239 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-nmtk4" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.929917 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-nmtk4"] Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.929974 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6f5cf48bd-n6csr"] Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.930066 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6f5cf48bd-n6csr" Oct 11 05:08:27 crc kubenswrapper[4651]: I1011 05:08:27.938527 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.034643 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-6b84468647-bq8d6" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.040901 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/449ef777-d7ea-4103-be80-917dca65751f-config\") pod \"dnsmasq-dns-688c87cc99-nmtk4\" (UID: \"449ef777-d7ea-4103-be80-917dca65751f\") " pod="openstack/dnsmasq-dns-688c87cc99-nmtk4" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.040969 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9b7de539-a960-4684-89a8-80cf59a5616f-config-data-custom\") pod \"barbican-api-6f5cf48bd-n6csr\" (UID: \"9b7de539-a960-4684-89a8-80cf59a5616f\") " pod="openstack/barbican-api-6f5cf48bd-n6csr" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.041117 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9b7de539-a960-4684-89a8-80cf59a5616f-logs\") pod \"barbican-api-6f5cf48bd-n6csr\" (UID: \"9b7de539-a960-4684-89a8-80cf59a5616f\") " pod="openstack/barbican-api-6f5cf48bd-n6csr" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.041201 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/449ef777-d7ea-4103-be80-917dca65751f-dns-svc\") pod \"dnsmasq-dns-688c87cc99-nmtk4\" (UID: \"449ef777-d7ea-4103-be80-917dca65751f\") " pod="openstack/dnsmasq-dns-688c87cc99-nmtk4" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.041216 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/449ef777-d7ea-4103-be80-917dca65751f-ovsdbserver-nb\") pod \"dnsmasq-dns-688c87cc99-nmtk4\" (UID: \"449ef777-d7ea-4103-be80-917dca65751f\") " pod="openstack/dnsmasq-dns-688c87cc99-nmtk4" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.041239 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/449ef777-d7ea-4103-be80-917dca65751f-ovsdbserver-sb\") pod \"dnsmasq-dns-688c87cc99-nmtk4\" (UID: \"449ef777-d7ea-4103-be80-917dca65751f\") " pod="openstack/dnsmasq-dns-688c87cc99-nmtk4" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.041280 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tldqg\" (UniqueName: \"kubernetes.io/projected/449ef777-d7ea-4103-be80-917dca65751f-kube-api-access-tldqg\") pod \"dnsmasq-dns-688c87cc99-nmtk4\" (UID: \"449ef777-d7ea-4103-be80-917dca65751f\") " pod="openstack/dnsmasq-dns-688c87cc99-nmtk4" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.041375 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b7de539-a960-4684-89a8-80cf59a5616f-combined-ca-bundle\") pod \"barbican-api-6f5cf48bd-n6csr\" (UID: \"9b7de539-a960-4684-89a8-80cf59a5616f\") " pod="openstack/barbican-api-6f5cf48bd-n6csr" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.041467 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/9b7de539-a960-4684-89a8-80cf59a5616f-config-data\") pod \"barbican-api-6f5cf48bd-n6csr\" (UID: \"9b7de539-a960-4684-89a8-80cf59a5616f\") " pod="openstack/barbican-api-6f5cf48bd-n6csr" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.041485 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/449ef777-d7ea-4103-be80-917dca65751f-dns-swift-storage-0\") pod \"dnsmasq-dns-688c87cc99-nmtk4\" (UID: \"449ef777-d7ea-4103-be80-917dca65751f\") " pod="openstack/dnsmasq-dns-688c87cc99-nmtk4" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.041506 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89hff\" (UniqueName: \"kubernetes.io/projected/9b7de539-a960-4684-89a8-80cf59a5616f-kube-api-access-89hff\") pod \"barbican-api-6f5cf48bd-n6csr\" (UID: \"9b7de539-a960-4684-89a8-80cf59a5616f\") " pod="openstack/barbican-api-6f5cf48bd-n6csr" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.071280 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-65475785f8-ljsqt" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.144896 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/449ef777-d7ea-4103-be80-917dca65751f-config\") pod \"dnsmasq-dns-688c87cc99-nmtk4\" (UID: \"449ef777-d7ea-4103-be80-917dca65751f\") " pod="openstack/dnsmasq-dns-688c87cc99-nmtk4" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.144948 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9b7de539-a960-4684-89a8-80cf59a5616f-config-data-custom\") pod \"barbican-api-6f5cf48bd-n6csr\" (UID: \"9b7de539-a960-4684-89a8-80cf59a5616f\") " pod="openstack/barbican-api-6f5cf48bd-n6csr" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.144986 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9b7de539-a960-4684-89a8-80cf59a5616f-logs\") pod \"barbican-api-6f5cf48bd-n6csr\" (UID: \"9b7de539-a960-4684-89a8-80cf59a5616f\") " pod="openstack/barbican-api-6f5cf48bd-n6csr" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.145031 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/449ef777-d7ea-4103-be80-917dca65751f-dns-svc\") pod \"dnsmasq-dns-688c87cc99-nmtk4\" (UID: \"449ef777-d7ea-4103-be80-917dca65751f\") " pod="openstack/dnsmasq-dns-688c87cc99-nmtk4" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.145053 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/449ef777-d7ea-4103-be80-917dca65751f-ovsdbserver-nb\") pod \"dnsmasq-dns-688c87cc99-nmtk4\" (UID: \"449ef777-d7ea-4103-be80-917dca65751f\") " pod="openstack/dnsmasq-dns-688c87cc99-nmtk4" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.145073 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/449ef777-d7ea-4103-be80-917dca65751f-ovsdbserver-sb\") pod \"dnsmasq-dns-688c87cc99-nmtk4\" (UID: \"449ef777-d7ea-4103-be80-917dca65751f\") " pod="openstack/dnsmasq-dns-688c87cc99-nmtk4" Oct 11 05:08:28 crc 
kubenswrapper[4651]: I1011 05:08:28.145100 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tldqg\" (UniqueName: \"kubernetes.io/projected/449ef777-d7ea-4103-be80-917dca65751f-kube-api-access-tldqg\") pod \"dnsmasq-dns-688c87cc99-nmtk4\" (UID: \"449ef777-d7ea-4103-be80-917dca65751f\") " pod="openstack/dnsmasq-dns-688c87cc99-nmtk4" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.145132 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b7de539-a960-4684-89a8-80cf59a5616f-combined-ca-bundle\") pod \"barbican-api-6f5cf48bd-n6csr\" (UID: \"9b7de539-a960-4684-89a8-80cf59a5616f\") " pod="openstack/barbican-api-6f5cf48bd-n6csr" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.145178 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b7de539-a960-4684-89a8-80cf59a5616f-config-data\") pod \"barbican-api-6f5cf48bd-n6csr\" (UID: \"9b7de539-a960-4684-89a8-80cf59a5616f\") " pod="openstack/barbican-api-6f5cf48bd-n6csr" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.145207 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/449ef777-d7ea-4103-be80-917dca65751f-dns-swift-storage-0\") pod \"dnsmasq-dns-688c87cc99-nmtk4\" (UID: \"449ef777-d7ea-4103-be80-917dca65751f\") " pod="openstack/dnsmasq-dns-688c87cc99-nmtk4" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.145268 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89hff\" (UniqueName: \"kubernetes.io/projected/9b7de539-a960-4684-89a8-80cf59a5616f-kube-api-access-89hff\") pod \"barbican-api-6f5cf48bd-n6csr\" (UID: \"9b7de539-a960-4684-89a8-80cf59a5616f\") " pod="openstack/barbican-api-6f5cf48bd-n6csr" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.146163 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/449ef777-d7ea-4103-be80-917dca65751f-config\") pod \"dnsmasq-dns-688c87cc99-nmtk4\" (UID: \"449ef777-d7ea-4103-be80-917dca65751f\") " pod="openstack/dnsmasq-dns-688c87cc99-nmtk4" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.146271 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/449ef777-d7ea-4103-be80-917dca65751f-ovsdbserver-nb\") pod \"dnsmasq-dns-688c87cc99-nmtk4\" (UID: \"449ef777-d7ea-4103-be80-917dca65751f\") " pod="openstack/dnsmasq-dns-688c87cc99-nmtk4" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.146390 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/449ef777-d7ea-4103-be80-917dca65751f-ovsdbserver-sb\") pod \"dnsmasq-dns-688c87cc99-nmtk4\" (UID: \"449ef777-d7ea-4103-be80-917dca65751f\") " pod="openstack/dnsmasq-dns-688c87cc99-nmtk4" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.146547 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/449ef777-d7ea-4103-be80-917dca65751f-dns-swift-storage-0\") pod \"dnsmasq-dns-688c87cc99-nmtk4\" (UID: \"449ef777-d7ea-4103-be80-917dca65751f\") " pod="openstack/dnsmasq-dns-688c87cc99-nmtk4" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.146919 4651 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/449ef777-d7ea-4103-be80-917dca65751f-dns-svc\") pod \"dnsmasq-dns-688c87cc99-nmtk4\" (UID: \"449ef777-d7ea-4103-be80-917dca65751f\") " pod="openstack/dnsmasq-dns-688c87cc99-nmtk4" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.147243 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9b7de539-a960-4684-89a8-80cf59a5616f-logs\") pod \"barbican-api-6f5cf48bd-n6csr\" (UID: \"9b7de539-a960-4684-89a8-80cf59a5616f\") " pod="openstack/barbican-api-6f5cf48bd-n6csr" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.170660 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b7de539-a960-4684-89a8-80cf59a5616f-combined-ca-bundle\") pod \"barbican-api-6f5cf48bd-n6csr\" (UID: \"9b7de539-a960-4684-89a8-80cf59a5616f\") " pod="openstack/barbican-api-6f5cf48bd-n6csr" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.171242 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b7de539-a960-4684-89a8-80cf59a5616f-config-data\") pod \"barbican-api-6f5cf48bd-n6csr\" (UID: \"9b7de539-a960-4684-89a8-80cf59a5616f\") " pod="openstack/barbican-api-6f5cf48bd-n6csr" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.176038 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89hff\" (UniqueName: \"kubernetes.io/projected/9b7de539-a960-4684-89a8-80cf59a5616f-kube-api-access-89hff\") pod \"barbican-api-6f5cf48bd-n6csr\" (UID: \"9b7de539-a960-4684-89a8-80cf59a5616f\") " pod="openstack/barbican-api-6f5cf48bd-n6csr" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.177772 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9b7de539-a960-4684-89a8-80cf59a5616f-config-data-custom\") pod \"barbican-api-6f5cf48bd-n6csr\" (UID: \"9b7de539-a960-4684-89a8-80cf59a5616f\") " pod="openstack/barbican-api-6f5cf48bd-n6csr" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.190476 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tldqg\" (UniqueName: \"kubernetes.io/projected/449ef777-d7ea-4103-be80-917dca65751f-kube-api-access-tldqg\") pod \"dnsmasq-dns-688c87cc99-nmtk4\" (UID: \"449ef777-d7ea-4103-be80-917dca65751f\") " pod="openstack/dnsmasq-dns-688c87cc99-nmtk4" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.254223 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-nmtk4" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.271633 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6f5cf48bd-n6csr" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.272549 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-74cbfd888-nqwlq"] Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.371890 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-8rxgw" event={"ID":"36b45d75-4e52-49b7-b7d7-13d53d2f7076","Type":"ContainerStarted","Data":"97eb21467ac13a278c5276c9ef33b51b684f94f0ddbeea3ea84e7606baaf3708"} Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.378050 4651 generic.go:334] "Generic (PLEG): container finished" podID="16b0d24a-e647-4381-9f03-9b48c34ba52f" containerID="87e0554092fce6ae4459d8cc5e40b1f6a8ab63cf1bd08c794cba430853fb94c3" exitCode=0 Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.378073 4651 generic.go:334] "Generic (PLEG): container finished" podID="16b0d24a-e647-4381-9f03-9b48c34ba52f" containerID="f323265d37b6f01bd3b83da9590c0d981a9001e7f7522dd167e346b2ed31a8bc" exitCode=2 Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.378111 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"16b0d24a-e647-4381-9f03-9b48c34ba52f","Type":"ContainerDied","Data":"87e0554092fce6ae4459d8cc5e40b1f6a8ab63cf1bd08c794cba430853fb94c3"} Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.378128 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"16b0d24a-e647-4381-9f03-9b48c34ba52f","Type":"ContainerDied","Data":"f323265d37b6f01bd3b83da9590c0d981a9001e7f7522dd167e346b2ed31a8bc"} Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.380112 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-74cbfd888-nqwlq" event={"ID":"c777922e-553b-44ec-84c1-4b3f6644701b","Type":"ContainerStarted","Data":"1d45dd118d83bf4726707450ce0f735fff7463267c5db1fcdeb5da985e48b7d6"} Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.380139 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.380241 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.401210 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-8rxgw" podStartSLOduration=3.696496493 podStartE2EDuration="52.401190864s" podCreationTimestamp="2025-10-11 05:07:36 +0000 UTC" firstStartedPulling="2025-10-11 05:07:38.199498161 +0000 UTC m=+979.095730957" lastFinishedPulling="2025-10-11 05:08:26.904192522 +0000 UTC m=+1027.800425328" observedRunningTime="2025-10-11 05:08:28.392134483 +0000 UTC m=+1029.288367279" watchObservedRunningTime="2025-10-11 05:08:28.401190864 +0000 UTC m=+1029.297423660" Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.601887 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6b84468647-bq8d6"] Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.645559 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-65475785f8-ljsqt"] Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.687537 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-nmtk4"] Oct 11 05:08:28 crc kubenswrapper[4651]: I1011 05:08:28.822345 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/barbican-api-6f5cf48bd-n6csr"] Oct 11 05:08:29 crc kubenswrapper[4651]: I1011 05:08:29.391578 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6f5cf48bd-n6csr" event={"ID":"9b7de539-a960-4684-89a8-80cf59a5616f","Type":"ContainerStarted","Data":"ad5cd4c6b13e2dc39cd4c72bd325768513f721c6738addc8bcf4d1bc4b56d18e"} Oct 11 05:08:29 crc kubenswrapper[4651]: I1011 05:08:29.391965 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6f5cf48bd-n6csr" event={"ID":"9b7de539-a960-4684-89a8-80cf59a5616f","Type":"ContainerStarted","Data":"62abaffe16727724b3354df55308d8a64f2ef7df9fd9d81d3a846f41193d3d43"} Oct 11 05:08:29 crc kubenswrapper[4651]: I1011 05:08:29.391976 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6f5cf48bd-n6csr" event={"ID":"9b7de539-a960-4684-89a8-80cf59a5616f","Type":"ContainerStarted","Data":"6e2179fae02f6190cc3e457973e99af7d5ae8a43d9d4c4a5a6f242ed4b8a279f"} Oct 11 05:08:29 crc kubenswrapper[4651]: I1011 05:08:29.393153 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6f5cf48bd-n6csr" Oct 11 05:08:29 crc kubenswrapper[4651]: I1011 05:08:29.393175 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6f5cf48bd-n6csr" Oct 11 05:08:29 crc kubenswrapper[4651]: I1011 05:08:29.396967 4651 generic.go:334] "Generic (PLEG): container finished" podID="449ef777-d7ea-4103-be80-917dca65751f" containerID="9d44b32769f0c427c8c603745ad60c500fbcd0ee1db183bf8b24c817e544662b" exitCode=0 Oct 11 05:08:29 crc kubenswrapper[4651]: I1011 05:08:29.397370 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-nmtk4" event={"ID":"449ef777-d7ea-4103-be80-917dca65751f","Type":"ContainerDied","Data":"9d44b32769f0c427c8c603745ad60c500fbcd0ee1db183bf8b24c817e544662b"} Oct 11 05:08:29 crc kubenswrapper[4651]: I1011 05:08:29.397398 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-nmtk4" event={"ID":"449ef777-d7ea-4103-be80-917dca65751f","Type":"ContainerStarted","Data":"43aa7d9377aa3c716312382a505a98bf04ded736d9b24bef597e095c0f213cf5"} Oct 11 05:08:29 crc kubenswrapper[4651]: I1011 05:08:29.403001 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-74cbfd888-nqwlq" event={"ID":"c777922e-553b-44ec-84c1-4b3f6644701b","Type":"ContainerStarted","Data":"1f316445ff43489a3d6fa1aa30cc0a7b7a7e3b5af852fa54eab89e3fe6f81d2a"} Oct 11 05:08:29 crc kubenswrapper[4651]: I1011 05:08:29.405447 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-74cbfd888-nqwlq" event={"ID":"c777922e-553b-44ec-84c1-4b3f6644701b","Type":"ContainerStarted","Data":"295b52a5f161905601529c5a21179f41a7ab41b6d9b7b8b77ef49e06743640d2"} Oct 11 05:08:29 crc kubenswrapper[4651]: I1011 05:08:29.405475 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-74cbfd888-nqwlq" Oct 11 05:08:29 crc kubenswrapper[4651]: I1011 05:08:29.405488 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-74cbfd888-nqwlq" Oct 11 05:08:29 crc kubenswrapper[4651]: I1011 05:08:29.406557 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-65475785f8-ljsqt" event={"ID":"3d5d4c12-9a64-4b51-9613-7d8905d3367f","Type":"ContainerStarted","Data":"806e2c54107575501fd4dab7acbc64c52d75bb16e1540b9d0ff1e1b76cc60d73"} Oct 11 05:08:29 crc 
kubenswrapper[4651]: I1011 05:08:29.408009 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6b84468647-bq8d6" event={"ID":"0a7fc0ac-3c48-4cd1-9cbd-78eca125768d","Type":"ContainerStarted","Data":"c953dd2b7550afaacee79f7afd18fe697cff1a0f0c90b73c715b35c3d7b85ee7"} Oct 11 05:08:29 crc kubenswrapper[4651]: I1011 05:08:29.420737 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-6f5cf48bd-n6csr" podStartSLOduration=2.420713327 podStartE2EDuration="2.420713327s" podCreationTimestamp="2025-10-11 05:08:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:08:29.414897129 +0000 UTC m=+1030.311129935" watchObservedRunningTime="2025-10-11 05:08:29.420713327 +0000 UTC m=+1030.316946123" Oct 11 05:08:29 crc kubenswrapper[4651]: I1011 05:08:29.449110 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-74cbfd888-nqwlq" podStartSLOduration=2.449075989 podStartE2EDuration="2.449075989s" podCreationTimestamp="2025-10-11 05:08:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:08:29.440556652 +0000 UTC m=+1030.336789468" watchObservedRunningTime="2025-10-11 05:08:29.449075989 +0000 UTC m=+1030.345308785" Oct 11 05:08:30 crc kubenswrapper[4651]: I1011 05:08:30.419546 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-nmtk4" event={"ID":"449ef777-d7ea-4103-be80-917dca65751f","Type":"ContainerStarted","Data":"e11ef0c5ab87a73e934163863a1bea0b8b3d8f0c49c7642b55e98262ad4852cf"} Oct 11 05:08:30 crc kubenswrapper[4651]: I1011 05:08:30.420482 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-688c87cc99-nmtk4" Oct 11 05:08:30 crc kubenswrapper[4651]: I1011 05:08:30.420684 4651 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 11 05:08:30 crc kubenswrapper[4651]: I1011 05:08:30.420702 4651 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 11 05:08:30 crc kubenswrapper[4651]: I1011 05:08:30.451674 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-688c87cc99-nmtk4" podStartSLOduration=3.451649751 podStartE2EDuration="3.451649751s" podCreationTimestamp="2025-10-11 05:08:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:08:30.447578567 +0000 UTC m=+1031.343811373" watchObservedRunningTime="2025-10-11 05:08:30.451649751 +0000 UTC m=+1031.347882537" Oct 11 05:08:30 crc kubenswrapper[4651]: I1011 05:08:30.487939 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-694f8cb944-jlqsz"] Oct 11 05:08:30 crc kubenswrapper[4651]: I1011 05:08:30.495869 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-694f8cb944-jlqsz" Oct 11 05:08:30 crc kubenswrapper[4651]: I1011 05:08:30.502262 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Oct 11 05:08:30 crc kubenswrapper[4651]: I1011 05:08:30.502568 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Oct 11 05:08:30 crc kubenswrapper[4651]: I1011 05:08:30.515569 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-694f8cb944-jlqsz"] Oct 11 05:08:30 crc kubenswrapper[4651]: I1011 05:08:30.604205 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7tvkx\" (UniqueName: \"kubernetes.io/projected/5c85be3f-c6fd-4d66-95ba-87b1502b5548-kube-api-access-7tvkx\") pod \"barbican-api-694f8cb944-jlqsz\" (UID: \"5c85be3f-c6fd-4d66-95ba-87b1502b5548\") " pod="openstack/barbican-api-694f8cb944-jlqsz" Oct 11 05:08:30 crc kubenswrapper[4651]: I1011 05:08:30.604269 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c85be3f-c6fd-4d66-95ba-87b1502b5548-combined-ca-bundle\") pod \"barbican-api-694f8cb944-jlqsz\" (UID: \"5c85be3f-c6fd-4d66-95ba-87b1502b5548\") " pod="openstack/barbican-api-694f8cb944-jlqsz" Oct 11 05:08:30 crc kubenswrapper[4651]: I1011 05:08:30.604327 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c85be3f-c6fd-4d66-95ba-87b1502b5548-logs\") pod \"barbican-api-694f8cb944-jlqsz\" (UID: \"5c85be3f-c6fd-4d66-95ba-87b1502b5548\") " pod="openstack/barbican-api-694f8cb944-jlqsz" Oct 11 05:08:30 crc kubenswrapper[4651]: I1011 05:08:30.604381 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c85be3f-c6fd-4d66-95ba-87b1502b5548-public-tls-certs\") pod \"barbican-api-694f8cb944-jlqsz\" (UID: \"5c85be3f-c6fd-4d66-95ba-87b1502b5548\") " pod="openstack/barbican-api-694f8cb944-jlqsz" Oct 11 05:08:30 crc kubenswrapper[4651]: I1011 05:08:30.604409 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c85be3f-c6fd-4d66-95ba-87b1502b5548-config-data\") pod \"barbican-api-694f8cb944-jlqsz\" (UID: \"5c85be3f-c6fd-4d66-95ba-87b1502b5548\") " pod="openstack/barbican-api-694f8cb944-jlqsz" Oct 11 05:08:30 crc kubenswrapper[4651]: I1011 05:08:30.604447 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c85be3f-c6fd-4d66-95ba-87b1502b5548-internal-tls-certs\") pod \"barbican-api-694f8cb944-jlqsz\" (UID: \"5c85be3f-c6fd-4d66-95ba-87b1502b5548\") " pod="openstack/barbican-api-694f8cb944-jlqsz" Oct 11 05:08:30 crc kubenswrapper[4651]: I1011 05:08:30.604478 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5c85be3f-c6fd-4d66-95ba-87b1502b5548-config-data-custom\") pod \"barbican-api-694f8cb944-jlqsz\" (UID: \"5c85be3f-c6fd-4d66-95ba-87b1502b5548\") " pod="openstack/barbican-api-694f8cb944-jlqsz" Oct 11 05:08:30 crc kubenswrapper[4651]: I1011 05:08:30.706706 4651 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c85be3f-c6fd-4d66-95ba-87b1502b5548-config-data\") pod \"barbican-api-694f8cb944-jlqsz\" (UID: \"5c85be3f-c6fd-4d66-95ba-87b1502b5548\") " pod="openstack/barbican-api-694f8cb944-jlqsz" Oct 11 05:08:30 crc kubenswrapper[4651]: I1011 05:08:30.706787 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c85be3f-c6fd-4d66-95ba-87b1502b5548-internal-tls-certs\") pod \"barbican-api-694f8cb944-jlqsz\" (UID: \"5c85be3f-c6fd-4d66-95ba-87b1502b5548\") " pod="openstack/barbican-api-694f8cb944-jlqsz" Oct 11 05:08:30 crc kubenswrapper[4651]: I1011 05:08:30.706838 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5c85be3f-c6fd-4d66-95ba-87b1502b5548-config-data-custom\") pod \"barbican-api-694f8cb944-jlqsz\" (UID: \"5c85be3f-c6fd-4d66-95ba-87b1502b5548\") " pod="openstack/barbican-api-694f8cb944-jlqsz" Oct 11 05:08:30 crc kubenswrapper[4651]: I1011 05:08:30.706927 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tvkx\" (UniqueName: \"kubernetes.io/projected/5c85be3f-c6fd-4d66-95ba-87b1502b5548-kube-api-access-7tvkx\") pod \"barbican-api-694f8cb944-jlqsz\" (UID: \"5c85be3f-c6fd-4d66-95ba-87b1502b5548\") " pod="openstack/barbican-api-694f8cb944-jlqsz" Oct 11 05:08:30 crc kubenswrapper[4651]: I1011 05:08:30.706986 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c85be3f-c6fd-4d66-95ba-87b1502b5548-combined-ca-bundle\") pod \"barbican-api-694f8cb944-jlqsz\" (UID: \"5c85be3f-c6fd-4d66-95ba-87b1502b5548\") " pod="openstack/barbican-api-694f8cb944-jlqsz" Oct 11 05:08:30 crc kubenswrapper[4651]: I1011 05:08:30.707022 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c85be3f-c6fd-4d66-95ba-87b1502b5548-logs\") pod \"barbican-api-694f8cb944-jlqsz\" (UID: \"5c85be3f-c6fd-4d66-95ba-87b1502b5548\") " pod="openstack/barbican-api-694f8cb944-jlqsz" Oct 11 05:08:30 crc kubenswrapper[4651]: I1011 05:08:30.707052 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c85be3f-c6fd-4d66-95ba-87b1502b5548-public-tls-certs\") pod \"barbican-api-694f8cb944-jlqsz\" (UID: \"5c85be3f-c6fd-4d66-95ba-87b1502b5548\") " pod="openstack/barbican-api-694f8cb944-jlqsz" Oct 11 05:08:30 crc kubenswrapper[4651]: I1011 05:08:30.708405 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c85be3f-c6fd-4d66-95ba-87b1502b5548-logs\") pod \"barbican-api-694f8cb944-jlqsz\" (UID: \"5c85be3f-c6fd-4d66-95ba-87b1502b5548\") " pod="openstack/barbican-api-694f8cb944-jlqsz" Oct 11 05:08:30 crc kubenswrapper[4651]: I1011 05:08:30.724837 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c85be3f-c6fd-4d66-95ba-87b1502b5548-combined-ca-bundle\") pod \"barbican-api-694f8cb944-jlqsz\" (UID: \"5c85be3f-c6fd-4d66-95ba-87b1502b5548\") " pod="openstack/barbican-api-694f8cb944-jlqsz" Oct 11 05:08:30 crc kubenswrapper[4651]: I1011 05:08:30.725585 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" 
(UniqueName: \"kubernetes.io/secret/5c85be3f-c6fd-4d66-95ba-87b1502b5548-config-data-custom\") pod \"barbican-api-694f8cb944-jlqsz\" (UID: \"5c85be3f-c6fd-4d66-95ba-87b1502b5548\") " pod="openstack/barbican-api-694f8cb944-jlqsz" Oct 11 05:08:30 crc kubenswrapper[4651]: I1011 05:08:30.726155 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c85be3f-c6fd-4d66-95ba-87b1502b5548-internal-tls-certs\") pod \"barbican-api-694f8cb944-jlqsz\" (UID: \"5c85be3f-c6fd-4d66-95ba-87b1502b5548\") " pod="openstack/barbican-api-694f8cb944-jlqsz" Oct 11 05:08:30 crc kubenswrapper[4651]: I1011 05:08:30.726466 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c85be3f-c6fd-4d66-95ba-87b1502b5548-public-tls-certs\") pod \"barbican-api-694f8cb944-jlqsz\" (UID: \"5c85be3f-c6fd-4d66-95ba-87b1502b5548\") " pod="openstack/barbican-api-694f8cb944-jlqsz" Oct 11 05:08:30 crc kubenswrapper[4651]: I1011 05:08:30.727852 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c85be3f-c6fd-4d66-95ba-87b1502b5548-config-data\") pod \"barbican-api-694f8cb944-jlqsz\" (UID: \"5c85be3f-c6fd-4d66-95ba-87b1502b5548\") " pod="openstack/barbican-api-694f8cb944-jlqsz" Oct 11 05:08:30 crc kubenswrapper[4651]: I1011 05:08:30.731055 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7tvkx\" (UniqueName: \"kubernetes.io/projected/5c85be3f-c6fd-4d66-95ba-87b1502b5548-kube-api-access-7tvkx\") pod \"barbican-api-694f8cb944-jlqsz\" (UID: \"5c85be3f-c6fd-4d66-95ba-87b1502b5548\") " pod="openstack/barbican-api-694f8cb944-jlqsz" Oct 11 05:08:30 crc kubenswrapper[4651]: I1011 05:08:30.832748 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-694f8cb944-jlqsz" Oct 11 05:08:31 crc kubenswrapper[4651]: I1011 05:08:31.425968 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 11 05:08:31 crc kubenswrapper[4651]: I1011 05:08:31.429969 4651 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 11 05:08:31 crc kubenswrapper[4651]: I1011 05:08:31.533885 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 11 05:08:31 crc kubenswrapper[4651]: I1011 05:08:31.783420 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-694f8cb944-jlqsz"] Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.150664 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.240711 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16b0d24a-e647-4381-9f03-9b48c34ba52f-combined-ca-bundle\") pod \"16b0d24a-e647-4381-9f03-9b48c34ba52f\" (UID: \"16b0d24a-e647-4381-9f03-9b48c34ba52f\") " Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.241074 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/16b0d24a-e647-4381-9f03-9b48c34ba52f-run-httpd\") pod \"16b0d24a-e647-4381-9f03-9b48c34ba52f\" (UID: \"16b0d24a-e647-4381-9f03-9b48c34ba52f\") " Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.241101 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16b0d24a-e647-4381-9f03-9b48c34ba52f-config-data\") pod \"16b0d24a-e647-4381-9f03-9b48c34ba52f\" (UID: \"16b0d24a-e647-4381-9f03-9b48c34ba52f\") " Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.241162 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/16b0d24a-e647-4381-9f03-9b48c34ba52f-log-httpd\") pod \"16b0d24a-e647-4381-9f03-9b48c34ba52f\" (UID: \"16b0d24a-e647-4381-9f03-9b48c34ba52f\") " Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.241266 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/16b0d24a-e647-4381-9f03-9b48c34ba52f-sg-core-conf-yaml\") pod \"16b0d24a-e647-4381-9f03-9b48c34ba52f\" (UID: \"16b0d24a-e647-4381-9f03-9b48c34ba52f\") " Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.241366 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16b0d24a-e647-4381-9f03-9b48c34ba52f-scripts\") pod \"16b0d24a-e647-4381-9f03-9b48c34ba52f\" (UID: \"16b0d24a-e647-4381-9f03-9b48c34ba52f\") " Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.241401 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p6kxl\" (UniqueName: \"kubernetes.io/projected/16b0d24a-e647-4381-9f03-9b48c34ba52f-kube-api-access-p6kxl\") pod \"16b0d24a-e647-4381-9f03-9b48c34ba52f\" (UID: \"16b0d24a-e647-4381-9f03-9b48c34ba52f\") " Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.241539 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/16b0d24a-e647-4381-9f03-9b48c34ba52f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "16b0d24a-e647-4381-9f03-9b48c34ba52f" (UID: "16b0d24a-e647-4381-9f03-9b48c34ba52f"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.241841 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/16b0d24a-e647-4381-9f03-9b48c34ba52f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "16b0d24a-e647-4381-9f03-9b48c34ba52f" (UID: "16b0d24a-e647-4381-9f03-9b48c34ba52f"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.242004 4651 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/16b0d24a-e647-4381-9f03-9b48c34ba52f-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.242027 4651 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/16b0d24a-e647-4381-9f03-9b48c34ba52f-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.245039 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16b0d24a-e647-4381-9f03-9b48c34ba52f-scripts" (OuterVolumeSpecName: "scripts") pod "16b0d24a-e647-4381-9f03-9b48c34ba52f" (UID: "16b0d24a-e647-4381-9f03-9b48c34ba52f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.246164 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16b0d24a-e647-4381-9f03-9b48c34ba52f-kube-api-access-p6kxl" (OuterVolumeSpecName: "kube-api-access-p6kxl") pod "16b0d24a-e647-4381-9f03-9b48c34ba52f" (UID: "16b0d24a-e647-4381-9f03-9b48c34ba52f"). InnerVolumeSpecName "kube-api-access-p6kxl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.275208 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16b0d24a-e647-4381-9f03-9b48c34ba52f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "16b0d24a-e647-4381-9f03-9b48c34ba52f" (UID: "16b0d24a-e647-4381-9f03-9b48c34ba52f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.313536 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16b0d24a-e647-4381-9f03-9b48c34ba52f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "16b0d24a-e647-4381-9f03-9b48c34ba52f" (UID: "16b0d24a-e647-4381-9f03-9b48c34ba52f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.344446 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16b0d24a-e647-4381-9f03-9b48c34ba52f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.344508 4651 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/16b0d24a-e647-4381-9f03-9b48c34ba52f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.344524 4651 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16b0d24a-e647-4381-9f03-9b48c34ba52f-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.344582 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p6kxl\" (UniqueName: \"kubernetes.io/projected/16b0d24a-e647-4381-9f03-9b48c34ba52f-kube-api-access-p6kxl\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.358190 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16b0d24a-e647-4381-9f03-9b48c34ba52f-config-data" (OuterVolumeSpecName: "config-data") pod "16b0d24a-e647-4381-9f03-9b48c34ba52f" (UID: "16b0d24a-e647-4381-9f03-9b48c34ba52f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.440355 4651 generic.go:334] "Generic (PLEG): container finished" podID="16b0d24a-e647-4381-9f03-9b48c34ba52f" containerID="9694fee13b033094ce30c5aac1dfc12712a8e18e5878f97c2f379916d2059414" exitCode=0 Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.440441 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"16b0d24a-e647-4381-9f03-9b48c34ba52f","Type":"ContainerDied","Data":"9694fee13b033094ce30c5aac1dfc12712a8e18e5878f97c2f379916d2059414"} Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.440466 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"16b0d24a-e647-4381-9f03-9b48c34ba52f","Type":"ContainerDied","Data":"d168b82302ecf2149ab4bf095710ed519c5de5178b38b3f144cac0c3cef2dd1b"} Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.440482 4651 scope.go:117] "RemoveContainer" containerID="87e0554092fce6ae4459d8cc5e40b1f6a8ab63cf1bd08c794cba430853fb94c3" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.440590 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.443004 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-694f8cb944-jlqsz" event={"ID":"5c85be3f-c6fd-4d66-95ba-87b1502b5548","Type":"ContainerStarted","Data":"f1501e29b67d9719b1f76d4d331e9bdc3603f163915715b26a04b1346b9da50b"} Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.443053 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-694f8cb944-jlqsz" event={"ID":"5c85be3f-c6fd-4d66-95ba-87b1502b5548","Type":"ContainerStarted","Data":"17855a9e2caa452099751c9dfd3dc816e995622c3dfc880385b1c7d835f808a1"} Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.443066 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-694f8cb944-jlqsz" event={"ID":"5c85be3f-c6fd-4d66-95ba-87b1502b5548","Type":"ContainerStarted","Data":"27682c341bf185118f74587d83d4e6ea8c11ba58e3da3f7ae6fab5b0bde6df3c"} Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.443284 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-694f8cb944-jlqsz" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.443323 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-694f8cb944-jlqsz" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.444763 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-65475785f8-ljsqt" event={"ID":"3d5d4c12-9a64-4b51-9613-7d8905d3367f","Type":"ContainerStarted","Data":"151bb19babaa25890a8ec99852d0e3a80a07d89b2403fdf9eb75e6506e3c5374"} Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.444791 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-65475785f8-ljsqt" event={"ID":"3d5d4c12-9a64-4b51-9613-7d8905d3367f","Type":"ContainerStarted","Data":"2a2ddd294cf001360da300eccb820d604d984955b6b4a424007f4f9b491b2c2b"} Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.446388 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16b0d24a-e647-4381-9f03-9b48c34ba52f-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.446809 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6b84468647-bq8d6" event={"ID":"0a7fc0ac-3c48-4cd1-9cbd-78eca125768d","Type":"ContainerStarted","Data":"5f293a789f7daf0018e80341957a5643f0d0bfb893833a4b87a9dc9893ec4040"} Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.446881 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6b84468647-bq8d6" event={"ID":"0a7fc0ac-3c48-4cd1-9cbd-78eca125768d","Type":"ContainerStarted","Data":"aac8028cf962f3b9665739476430414e114f889e5f1e6d63470e441a9ef99941"} Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.464890 4651 scope.go:117] "RemoveContainer" containerID="f323265d37b6f01bd3b83da9590c0d981a9001e7f7522dd167e346b2ed31a8bc" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.465415 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-65475785f8-ljsqt" podStartSLOduration=2.832922546 podStartE2EDuration="5.465394413s" podCreationTimestamp="2025-10-11 05:08:27 +0000 UTC" firstStartedPulling="2025-10-11 05:08:28.659763544 +0000 UTC m=+1029.555996340" lastFinishedPulling="2025-10-11 05:08:31.292235411 +0000 UTC 
m=+1032.188468207" observedRunningTime="2025-10-11 05:08:32.46528173 +0000 UTC m=+1033.361514536" watchObservedRunningTime="2025-10-11 05:08:32.465394413 +0000 UTC m=+1033.361627209" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.496415 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-694f8cb944-jlqsz" podStartSLOduration=2.496394371 podStartE2EDuration="2.496394371s" podCreationTimestamp="2025-10-11 05:08:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:08:32.493221541 +0000 UTC m=+1033.389454357" watchObservedRunningTime="2025-10-11 05:08:32.496394371 +0000 UTC m=+1033.392627177" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.512701 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-6b84468647-bq8d6" podStartSLOduration=2.832517796 podStartE2EDuration="5.512682756s" podCreationTimestamp="2025-10-11 05:08:27 +0000 UTC" firstStartedPulling="2025-10-11 05:08:28.612369388 +0000 UTC m=+1029.508602184" lastFinishedPulling="2025-10-11 05:08:31.292534348 +0000 UTC m=+1032.188767144" observedRunningTime="2025-10-11 05:08:32.512160393 +0000 UTC m=+1033.408393189" watchObservedRunningTime="2025-10-11 05:08:32.512682756 +0000 UTC m=+1033.408915562" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.523930 4651 scope.go:117] "RemoveContainer" containerID="9694fee13b033094ce30c5aac1dfc12712a8e18e5878f97c2f379916d2059414" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.554339 4651 scope.go:117] "RemoveContainer" containerID="87e0554092fce6ae4459d8cc5e40b1f6a8ab63cf1bd08c794cba430853fb94c3" Oct 11 05:08:32 crc kubenswrapper[4651]: E1011 05:08:32.555368 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"87e0554092fce6ae4459d8cc5e40b1f6a8ab63cf1bd08c794cba430853fb94c3\": container with ID starting with 87e0554092fce6ae4459d8cc5e40b1f6a8ab63cf1bd08c794cba430853fb94c3 not found: ID does not exist" containerID="87e0554092fce6ae4459d8cc5e40b1f6a8ab63cf1bd08c794cba430853fb94c3" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.555406 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87e0554092fce6ae4459d8cc5e40b1f6a8ab63cf1bd08c794cba430853fb94c3"} err="failed to get container status \"87e0554092fce6ae4459d8cc5e40b1f6a8ab63cf1bd08c794cba430853fb94c3\": rpc error: code = NotFound desc = could not find container \"87e0554092fce6ae4459d8cc5e40b1f6a8ab63cf1bd08c794cba430853fb94c3\": container with ID starting with 87e0554092fce6ae4459d8cc5e40b1f6a8ab63cf1bd08c794cba430853fb94c3 not found: ID does not exist" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.555433 4651 scope.go:117] "RemoveContainer" containerID="f323265d37b6f01bd3b83da9590c0d981a9001e7f7522dd167e346b2ed31a8bc" Oct 11 05:08:32 crc kubenswrapper[4651]: E1011 05:08:32.556696 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f323265d37b6f01bd3b83da9590c0d981a9001e7f7522dd167e346b2ed31a8bc\": container with ID starting with f323265d37b6f01bd3b83da9590c0d981a9001e7f7522dd167e346b2ed31a8bc not found: ID does not exist" containerID="f323265d37b6f01bd3b83da9590c0d981a9001e7f7522dd167e346b2ed31a8bc" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.556739 4651 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f323265d37b6f01bd3b83da9590c0d981a9001e7f7522dd167e346b2ed31a8bc"} err="failed to get container status \"f323265d37b6f01bd3b83da9590c0d981a9001e7f7522dd167e346b2ed31a8bc\": rpc error: code = NotFound desc = could not find container \"f323265d37b6f01bd3b83da9590c0d981a9001e7f7522dd167e346b2ed31a8bc\": container with ID starting with f323265d37b6f01bd3b83da9590c0d981a9001e7f7522dd167e346b2ed31a8bc not found: ID does not exist" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.556767 4651 scope.go:117] "RemoveContainer" containerID="9694fee13b033094ce30c5aac1dfc12712a8e18e5878f97c2f379916d2059414" Oct 11 05:08:32 crc kubenswrapper[4651]: E1011 05:08:32.561000 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9694fee13b033094ce30c5aac1dfc12712a8e18e5878f97c2f379916d2059414\": container with ID starting with 9694fee13b033094ce30c5aac1dfc12712a8e18e5878f97c2f379916d2059414 not found: ID does not exist" containerID="9694fee13b033094ce30c5aac1dfc12712a8e18e5878f97c2f379916d2059414" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.561058 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9694fee13b033094ce30c5aac1dfc12712a8e18e5878f97c2f379916d2059414"} err="failed to get container status \"9694fee13b033094ce30c5aac1dfc12712a8e18e5878f97c2f379916d2059414\": rpc error: code = NotFound desc = could not find container \"9694fee13b033094ce30c5aac1dfc12712a8e18e5878f97c2f379916d2059414\": container with ID starting with 9694fee13b033094ce30c5aac1dfc12712a8e18e5878f97c2f379916d2059414 not found: ID does not exist" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.563949 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.579532 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.592502 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 11 05:08:32 crc kubenswrapper[4651]: E1011 05:08:32.593032 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16b0d24a-e647-4381-9f03-9b48c34ba52f" containerName="proxy-httpd" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.593060 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="16b0d24a-e647-4381-9f03-9b48c34ba52f" containerName="proxy-httpd" Oct 11 05:08:32 crc kubenswrapper[4651]: E1011 05:08:32.593095 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16b0d24a-e647-4381-9f03-9b48c34ba52f" containerName="ceilometer-notification-agent" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.593105 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="16b0d24a-e647-4381-9f03-9b48c34ba52f" containerName="ceilometer-notification-agent" Oct 11 05:08:32 crc kubenswrapper[4651]: E1011 05:08:32.593154 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16b0d24a-e647-4381-9f03-9b48c34ba52f" containerName="sg-core" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.593163 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="16b0d24a-e647-4381-9f03-9b48c34ba52f" containerName="sg-core" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.593397 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="16b0d24a-e647-4381-9f03-9b48c34ba52f" 
containerName="proxy-httpd" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.593417 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="16b0d24a-e647-4381-9f03-9b48c34ba52f" containerName="sg-core" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.593429 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="16b0d24a-e647-4381-9f03-9b48c34ba52f" containerName="ceilometer-notification-agent" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.595665 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.600786 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.601528 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.608876 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.752942 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c69b234-aa8e-440b-b730-b901ebe0a7b1-config-data\") pod \"ceilometer-0\" (UID: \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\") " pod="openstack/ceilometer-0" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.752999 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0c69b234-aa8e-440b-b730-b901ebe0a7b1-scripts\") pod \"ceilometer-0\" (UID: \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\") " pod="openstack/ceilometer-0" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.753069 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c69b234-aa8e-440b-b730-b901ebe0a7b1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\") " pod="openstack/ceilometer-0" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.753098 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0c69b234-aa8e-440b-b730-b901ebe0a7b1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\") " pod="openstack/ceilometer-0" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.753155 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5j2d4\" (UniqueName: \"kubernetes.io/projected/0c69b234-aa8e-440b-b730-b901ebe0a7b1-kube-api-access-5j2d4\") pod \"ceilometer-0\" (UID: \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\") " pod="openstack/ceilometer-0" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.753200 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0c69b234-aa8e-440b-b730-b901ebe0a7b1-run-httpd\") pod \"ceilometer-0\" (UID: \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\") " pod="openstack/ceilometer-0" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.753227 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/0c69b234-aa8e-440b-b730-b901ebe0a7b1-log-httpd\") pod \"ceilometer-0\" (UID: \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\") " pod="openstack/ceilometer-0" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.855018 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c69b234-aa8e-440b-b730-b901ebe0a7b1-config-data\") pod \"ceilometer-0\" (UID: \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\") " pod="openstack/ceilometer-0" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.855350 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0c69b234-aa8e-440b-b730-b901ebe0a7b1-scripts\") pod \"ceilometer-0\" (UID: \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\") " pod="openstack/ceilometer-0" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.855415 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c69b234-aa8e-440b-b730-b901ebe0a7b1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\") " pod="openstack/ceilometer-0" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.855462 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0c69b234-aa8e-440b-b730-b901ebe0a7b1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\") " pod="openstack/ceilometer-0" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.855515 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5j2d4\" (UniqueName: \"kubernetes.io/projected/0c69b234-aa8e-440b-b730-b901ebe0a7b1-kube-api-access-5j2d4\") pod \"ceilometer-0\" (UID: \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\") " pod="openstack/ceilometer-0" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.855564 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0c69b234-aa8e-440b-b730-b901ebe0a7b1-run-httpd\") pod \"ceilometer-0\" (UID: \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\") " pod="openstack/ceilometer-0" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.855592 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0c69b234-aa8e-440b-b730-b901ebe0a7b1-log-httpd\") pod \"ceilometer-0\" (UID: \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\") " pod="openstack/ceilometer-0" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.856562 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0c69b234-aa8e-440b-b730-b901ebe0a7b1-log-httpd\") pod \"ceilometer-0\" (UID: \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\") " pod="openstack/ceilometer-0" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.856676 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0c69b234-aa8e-440b-b730-b901ebe0a7b1-run-httpd\") pod \"ceilometer-0\" (UID: \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\") " pod="openstack/ceilometer-0" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.859108 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/0c69b234-aa8e-440b-b730-b901ebe0a7b1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\") " pod="openstack/ceilometer-0" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.859669 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c69b234-aa8e-440b-b730-b901ebe0a7b1-config-data\") pod \"ceilometer-0\" (UID: \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\") " pod="openstack/ceilometer-0" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.859869 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0c69b234-aa8e-440b-b730-b901ebe0a7b1-scripts\") pod \"ceilometer-0\" (UID: \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\") " pod="openstack/ceilometer-0" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.860443 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c69b234-aa8e-440b-b730-b901ebe0a7b1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\") " pod="openstack/ceilometer-0" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.873844 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5j2d4\" (UniqueName: \"kubernetes.io/projected/0c69b234-aa8e-440b-b730-b901ebe0a7b1-kube-api-access-5j2d4\") pod \"ceilometer-0\" (UID: \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\") " pod="openstack/ceilometer-0" Oct 11 05:08:32 crc kubenswrapper[4651]: I1011 05:08:32.925860 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 11 05:08:33 crc kubenswrapper[4651]: W1011 05:08:33.498496 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0c69b234_aa8e_440b_b730_b901ebe0a7b1.slice/crio-c742b51a7c237adb031758a21c14952de2fb86827e756eb22a9051967c5fd82e WatchSource:0}: Error finding container c742b51a7c237adb031758a21c14952de2fb86827e756eb22a9051967c5fd82e: Status 404 returned error can't find the container with id c742b51a7c237adb031758a21c14952de2fb86827e756eb22a9051967c5fd82e Oct 11 05:08:33 crc kubenswrapper[4651]: I1011 05:08:33.501276 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 11 05:08:33 crc kubenswrapper[4651]: I1011 05:08:33.879851 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16b0d24a-e647-4381-9f03-9b48c34ba52f" path="/var/lib/kubelet/pods/16b0d24a-e647-4381-9f03-9b48c34ba52f/volumes" Oct 11 05:08:34 crc kubenswrapper[4651]: I1011 05:08:34.488229 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0c69b234-aa8e-440b-b730-b901ebe0a7b1","Type":"ContainerStarted","Data":"3c0f85abdf531f0692e26e985d330571d01a76caaf6618b4aca879dd281d80f2"} Oct 11 05:08:34 crc kubenswrapper[4651]: I1011 05:08:34.488550 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0c69b234-aa8e-440b-b730-b901ebe0a7b1","Type":"ContainerStarted","Data":"c742b51a7c237adb031758a21c14952de2fb86827e756eb22a9051967c5fd82e"} Oct 11 05:08:34 crc kubenswrapper[4651]: I1011 05:08:34.490713 4651 generic.go:334] "Generic (PLEG): container finished" podID="36b45d75-4e52-49b7-b7d7-13d53d2f7076" containerID="97eb21467ac13a278c5276c9ef33b51b684f94f0ddbeea3ea84e7606baaf3708" exitCode=0 Oct 11 05:08:34 crc 
kubenswrapper[4651]: I1011 05:08:34.490763 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-8rxgw" event={"ID":"36b45d75-4e52-49b7-b7d7-13d53d2f7076","Type":"ContainerDied","Data":"97eb21467ac13a278c5276c9ef33b51b684f94f0ddbeea3ea84e7606baaf3708"} Oct 11 05:08:35 crc kubenswrapper[4651]: I1011 05:08:35.513599 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0c69b234-aa8e-440b-b730-b901ebe0a7b1","Type":"ContainerStarted","Data":"87d142c83cbdde5c738e3a4deb5da1b53b953facf14e583ff63f3187908e6c4c"} Oct 11 05:08:35 crc kubenswrapper[4651]: I1011 05:08:35.922913 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-8rxgw" Oct 11 05:08:35 crc kubenswrapper[4651]: I1011 05:08:35.976029 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/36b45d75-4e52-49b7-b7d7-13d53d2f7076-etc-machine-id\") pod \"36b45d75-4e52-49b7-b7d7-13d53d2f7076\" (UID: \"36b45d75-4e52-49b7-b7d7-13d53d2f7076\") " Oct 11 05:08:35 crc kubenswrapper[4651]: I1011 05:08:35.976135 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36b45d75-4e52-49b7-b7d7-13d53d2f7076-combined-ca-bundle\") pod \"36b45d75-4e52-49b7-b7d7-13d53d2f7076\" (UID: \"36b45d75-4e52-49b7-b7d7-13d53d2f7076\") " Oct 11 05:08:35 crc kubenswrapper[4651]: I1011 05:08:35.976206 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/36b45d75-4e52-49b7-b7d7-13d53d2f7076-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "36b45d75-4e52-49b7-b7d7-13d53d2f7076" (UID: "36b45d75-4e52-49b7-b7d7-13d53d2f7076"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 05:08:35 crc kubenswrapper[4651]: I1011 05:08:35.976294 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36b45d75-4e52-49b7-b7d7-13d53d2f7076-config-data\") pod \"36b45d75-4e52-49b7-b7d7-13d53d2f7076\" (UID: \"36b45d75-4e52-49b7-b7d7-13d53d2f7076\") " Oct 11 05:08:35 crc kubenswrapper[4651]: I1011 05:08:35.976316 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nbjpx\" (UniqueName: \"kubernetes.io/projected/36b45d75-4e52-49b7-b7d7-13d53d2f7076-kube-api-access-nbjpx\") pod \"36b45d75-4e52-49b7-b7d7-13d53d2f7076\" (UID: \"36b45d75-4e52-49b7-b7d7-13d53d2f7076\") " Oct 11 05:08:35 crc kubenswrapper[4651]: I1011 05:08:35.976347 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/36b45d75-4e52-49b7-b7d7-13d53d2f7076-db-sync-config-data\") pod \"36b45d75-4e52-49b7-b7d7-13d53d2f7076\" (UID: \"36b45d75-4e52-49b7-b7d7-13d53d2f7076\") " Oct 11 05:08:35 crc kubenswrapper[4651]: I1011 05:08:35.976407 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36b45d75-4e52-49b7-b7d7-13d53d2f7076-scripts\") pod \"36b45d75-4e52-49b7-b7d7-13d53d2f7076\" (UID: \"36b45d75-4e52-49b7-b7d7-13d53d2f7076\") " Oct 11 05:08:35 crc kubenswrapper[4651]: I1011 05:08:35.976775 4651 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/36b45d75-4e52-49b7-b7d7-13d53d2f7076-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:35 crc kubenswrapper[4651]: I1011 05:08:35.981058 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36b45d75-4e52-49b7-b7d7-13d53d2f7076-kube-api-access-nbjpx" (OuterVolumeSpecName: "kube-api-access-nbjpx") pod "36b45d75-4e52-49b7-b7d7-13d53d2f7076" (UID: "36b45d75-4e52-49b7-b7d7-13d53d2f7076"). InnerVolumeSpecName "kube-api-access-nbjpx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:08:35 crc kubenswrapper[4651]: I1011 05:08:35.981606 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36b45d75-4e52-49b7-b7d7-13d53d2f7076-scripts" (OuterVolumeSpecName: "scripts") pod "36b45d75-4e52-49b7-b7d7-13d53d2f7076" (UID: "36b45d75-4e52-49b7-b7d7-13d53d2f7076"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:35 crc kubenswrapper[4651]: I1011 05:08:35.981982 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36b45d75-4e52-49b7-b7d7-13d53d2f7076-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "36b45d75-4e52-49b7-b7d7-13d53d2f7076" (UID: "36b45d75-4e52-49b7-b7d7-13d53d2f7076"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:36 crc kubenswrapper[4651]: I1011 05:08:36.002464 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36b45d75-4e52-49b7-b7d7-13d53d2f7076-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "36b45d75-4e52-49b7-b7d7-13d53d2f7076" (UID: "36b45d75-4e52-49b7-b7d7-13d53d2f7076"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:36 crc kubenswrapper[4651]: I1011 05:08:36.020636 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36b45d75-4e52-49b7-b7d7-13d53d2f7076-config-data" (OuterVolumeSpecName: "config-data") pod "36b45d75-4e52-49b7-b7d7-13d53d2f7076" (UID: "36b45d75-4e52-49b7-b7d7-13d53d2f7076"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:36 crc kubenswrapper[4651]: I1011 05:08:36.079523 4651 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36b45d75-4e52-49b7-b7d7-13d53d2f7076-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:36 crc kubenswrapper[4651]: I1011 05:08:36.079562 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36b45d75-4e52-49b7-b7d7-13d53d2f7076-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:36 crc kubenswrapper[4651]: I1011 05:08:36.079576 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nbjpx\" (UniqueName: \"kubernetes.io/projected/36b45d75-4e52-49b7-b7d7-13d53d2f7076-kube-api-access-nbjpx\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:36 crc kubenswrapper[4651]: I1011 05:08:36.079585 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36b45d75-4e52-49b7-b7d7-13d53d2f7076-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:36 crc kubenswrapper[4651]: I1011 05:08:36.079599 4651 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/36b45d75-4e52-49b7-b7d7-13d53d2f7076-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:36 crc kubenswrapper[4651]: I1011 05:08:36.523714 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0c69b234-aa8e-440b-b730-b901ebe0a7b1","Type":"ContainerStarted","Data":"d851878bbb579eae5133c54b976d0f45d2db8027ecb879e9dfa2974147c2688f"} Oct 11 05:08:36 crc kubenswrapper[4651]: I1011 05:08:36.526156 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-8rxgw" event={"ID":"36b45d75-4e52-49b7-b7d7-13d53d2f7076","Type":"ContainerDied","Data":"4bfa103027f7526766f59ef9bd0531914559ad883df5930f932503999b6165f3"} Oct 11 05:08:36 crc kubenswrapper[4651]: I1011 05:08:36.526289 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4bfa103027f7526766f59ef9bd0531914559ad883df5930f932503999b6165f3" Oct 11 05:08:36 crc kubenswrapper[4651]: I1011 05:08:36.526424 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-8rxgw" Oct 11 05:08:36 crc kubenswrapper[4651]: I1011 05:08:36.820170 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 11 05:08:36 crc kubenswrapper[4651]: E1011 05:08:36.820620 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36b45d75-4e52-49b7-b7d7-13d53d2f7076" containerName="cinder-db-sync" Oct 11 05:08:36 crc kubenswrapper[4651]: I1011 05:08:36.820643 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="36b45d75-4e52-49b7-b7d7-13d53d2f7076" containerName="cinder-db-sync" Oct 11 05:08:36 crc kubenswrapper[4651]: I1011 05:08:36.820937 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="36b45d75-4e52-49b7-b7d7-13d53d2f7076" containerName="cinder-db-sync" Oct 11 05:08:36 crc kubenswrapper[4651]: I1011 05:08:36.822181 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 11 05:08:36 crc kubenswrapper[4651]: I1011 05:08:36.827649 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 11 05:08:36 crc kubenswrapper[4651]: I1011 05:08:36.827771 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-wzgq2" Oct 11 05:08:36 crc kubenswrapper[4651]: I1011 05:08:36.827887 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 11 05:08:36 crc kubenswrapper[4651]: I1011 05:08:36.827904 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 11 05:08:36 crc kubenswrapper[4651]: I1011 05:08:36.850372 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 11 05:08:36 crc kubenswrapper[4651]: I1011 05:08:36.882444 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-nmtk4"] Oct 11 05:08:36 crc kubenswrapper[4651]: I1011 05:08:36.882714 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-688c87cc99-nmtk4" podUID="449ef777-d7ea-4103-be80-917dca65751f" containerName="dnsmasq-dns" containerID="cri-o://e11ef0c5ab87a73e934163863a1bea0b8b3d8f0c49c7642b55e98262ad4852cf" gracePeriod=10 Oct 11 05:08:36 crc kubenswrapper[4651]: I1011 05:08:36.891273 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-688c87cc99-nmtk4" Oct 11 05:08:36 crc kubenswrapper[4651]: I1011 05:08:36.895535 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2df51120-c272-4eb3-bbdf-85c2753f7640-config-data\") pod \"cinder-scheduler-0\" (UID: \"2df51120-c272-4eb3-bbdf-85c2753f7640\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:36 crc kubenswrapper[4651]: I1011 05:08:36.895588 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7czwc\" (UniqueName: \"kubernetes.io/projected/2df51120-c272-4eb3-bbdf-85c2753f7640-kube-api-access-7czwc\") pod \"cinder-scheduler-0\" (UID: \"2df51120-c272-4eb3-bbdf-85c2753f7640\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:36 crc kubenswrapper[4651]: I1011 05:08:36.895625 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/2df51120-c272-4eb3-bbdf-85c2753f7640-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"2df51120-c272-4eb3-bbdf-85c2753f7640\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:36 crc kubenswrapper[4651]: I1011 05:08:36.895719 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2df51120-c272-4eb3-bbdf-85c2753f7640-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"2df51120-c272-4eb3-bbdf-85c2753f7640\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:36 crc kubenswrapper[4651]: I1011 05:08:36.895745 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2df51120-c272-4eb3-bbdf-85c2753f7640-scripts\") pod \"cinder-scheduler-0\" (UID: \"2df51120-c272-4eb3-bbdf-85c2753f7640\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:36 crc kubenswrapper[4651]: I1011 05:08:36.896948 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2df51120-c272-4eb3-bbdf-85c2753f7640-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"2df51120-c272-4eb3-bbdf-85c2753f7640\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:36 crc kubenswrapper[4651]: I1011 05:08:36.979190 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-n8cw8"] Oct 11 05:08:36 crc kubenswrapper[4651]: I1011 05:08:36.980853 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-n8cw8" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.000899 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2df51120-c272-4eb3-bbdf-85c2753f7640-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"2df51120-c272-4eb3-bbdf-85c2753f7640\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.000942 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2df51120-c272-4eb3-bbdf-85c2753f7640-scripts\") pod \"cinder-scheduler-0\" (UID: \"2df51120-c272-4eb3-bbdf-85c2753f7640\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.000970 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2df51120-c272-4eb3-bbdf-85c2753f7640-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"2df51120-c272-4eb3-bbdf-85c2753f7640\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.001072 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2df51120-c272-4eb3-bbdf-85c2753f7640-config-data\") pod \"cinder-scheduler-0\" (UID: \"2df51120-c272-4eb3-bbdf-85c2753f7640\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.001103 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7czwc\" (UniqueName: \"kubernetes.io/projected/2df51120-c272-4eb3-bbdf-85c2753f7640-kube-api-access-7czwc\") pod \"cinder-scheduler-0\" (UID: \"2df51120-c272-4eb3-bbdf-85c2753f7640\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:37 crc 
kubenswrapper[4651]: I1011 05:08:37.001134 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2df51120-c272-4eb3-bbdf-85c2753f7640-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"2df51120-c272-4eb3-bbdf-85c2753f7640\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.004803 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2df51120-c272-4eb3-bbdf-85c2753f7640-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"2df51120-c272-4eb3-bbdf-85c2753f7640\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.010173 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2df51120-c272-4eb3-bbdf-85c2753f7640-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"2df51120-c272-4eb3-bbdf-85c2753f7640\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.016446 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-n8cw8"] Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.033382 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2df51120-c272-4eb3-bbdf-85c2753f7640-config-data\") pod \"cinder-scheduler-0\" (UID: \"2df51120-c272-4eb3-bbdf-85c2753f7640\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.033758 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2df51120-c272-4eb3-bbdf-85c2753f7640-scripts\") pod \"cinder-scheduler-0\" (UID: \"2df51120-c272-4eb3-bbdf-85c2753f7640\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.051589 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2df51120-c272-4eb3-bbdf-85c2753f7640-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"2df51120-c272-4eb3-bbdf-85c2753f7640\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.084378 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7czwc\" (UniqueName: \"kubernetes.io/projected/2df51120-c272-4eb3-bbdf-85c2753f7640-kube-api-access-7czwc\") pod \"cinder-scheduler-0\" (UID: \"2df51120-c272-4eb3-bbdf-85c2753f7640\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.104600 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qb5fq\" (UniqueName: \"kubernetes.io/projected/482a99d8-100a-4f9c-9cd2-2834cd349de7-kube-api-access-qb5fq\") pod \"dnsmasq-dns-6bb4fc677f-n8cw8\" (UID: \"482a99d8-100a-4f9c-9cd2-2834cd349de7\") " pod="openstack/dnsmasq-dns-6bb4fc677f-n8cw8" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.104724 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/482a99d8-100a-4f9c-9cd2-2834cd349de7-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-n8cw8\" (UID: \"482a99d8-100a-4f9c-9cd2-2834cd349de7\") " pod="openstack/dnsmasq-dns-6bb4fc677f-n8cw8" Oct 11 05:08:37 crc 
kubenswrapper[4651]: I1011 05:08:37.105004 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/482a99d8-100a-4f9c-9cd2-2834cd349de7-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-n8cw8\" (UID: \"482a99d8-100a-4f9c-9cd2-2834cd349de7\") " pod="openstack/dnsmasq-dns-6bb4fc677f-n8cw8" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.105103 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/482a99d8-100a-4f9c-9cd2-2834cd349de7-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-n8cw8\" (UID: \"482a99d8-100a-4f9c-9cd2-2834cd349de7\") " pod="openstack/dnsmasq-dns-6bb4fc677f-n8cw8" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.105247 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/482a99d8-100a-4f9c-9cd2-2834cd349de7-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-n8cw8\" (UID: \"482a99d8-100a-4f9c-9cd2-2834cd349de7\") " pod="openstack/dnsmasq-dns-6bb4fc677f-n8cw8" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.105324 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/482a99d8-100a-4f9c-9cd2-2834cd349de7-config\") pod \"dnsmasq-dns-6bb4fc677f-n8cw8\" (UID: \"482a99d8-100a-4f9c-9cd2-2834cd349de7\") " pod="openstack/dnsmasq-dns-6bb4fc677f-n8cw8" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.170800 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.174260 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.175810 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.179306 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.207785 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/482a99d8-100a-4f9c-9cd2-2834cd349de7-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-n8cw8\" (UID: \"482a99d8-100a-4f9c-9cd2-2834cd349de7\") " pod="openstack/dnsmasq-dns-6bb4fc677f-n8cw8" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.207872 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/482a99d8-100a-4f9c-9cd2-2834cd349de7-config\") pod \"dnsmasq-dns-6bb4fc677f-n8cw8\" (UID: \"482a99d8-100a-4f9c-9cd2-2834cd349de7\") " pod="openstack/dnsmasq-dns-6bb4fc677f-n8cw8" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.207960 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qb5fq\" (UniqueName: \"kubernetes.io/projected/482a99d8-100a-4f9c-9cd2-2834cd349de7-kube-api-access-qb5fq\") pod \"dnsmasq-dns-6bb4fc677f-n8cw8\" (UID: \"482a99d8-100a-4f9c-9cd2-2834cd349de7\") " pod="openstack/dnsmasq-dns-6bb4fc677f-n8cw8" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.208022 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/482a99d8-100a-4f9c-9cd2-2834cd349de7-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-n8cw8\" (UID: \"482a99d8-100a-4f9c-9cd2-2834cd349de7\") " pod="openstack/dnsmasq-dns-6bb4fc677f-n8cw8" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.208069 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/482a99d8-100a-4f9c-9cd2-2834cd349de7-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-n8cw8\" (UID: \"482a99d8-100a-4f9c-9cd2-2834cd349de7\") " pod="openstack/dnsmasq-dns-6bb4fc677f-n8cw8" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.208102 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/482a99d8-100a-4f9c-9cd2-2834cd349de7-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-n8cw8\" (UID: \"482a99d8-100a-4f9c-9cd2-2834cd349de7\") " pod="openstack/dnsmasq-dns-6bb4fc677f-n8cw8" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.209252 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/482a99d8-100a-4f9c-9cd2-2834cd349de7-config\") pod \"dnsmasq-dns-6bb4fc677f-n8cw8\" (UID: \"482a99d8-100a-4f9c-9cd2-2834cd349de7\") " pod="openstack/dnsmasq-dns-6bb4fc677f-n8cw8" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.209274 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/482a99d8-100a-4f9c-9cd2-2834cd349de7-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-n8cw8\" (UID: \"482a99d8-100a-4f9c-9cd2-2834cd349de7\") " pod="openstack/dnsmasq-dns-6bb4fc677f-n8cw8" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.209307 4651 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/482a99d8-100a-4f9c-9cd2-2834cd349de7-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-n8cw8\" (UID: \"482a99d8-100a-4f9c-9cd2-2834cd349de7\") " pod="openstack/dnsmasq-dns-6bb4fc677f-n8cw8" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.209429 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/482a99d8-100a-4f9c-9cd2-2834cd349de7-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-n8cw8\" (UID: \"482a99d8-100a-4f9c-9cd2-2834cd349de7\") " pod="openstack/dnsmasq-dns-6bb4fc677f-n8cw8" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.210654 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/482a99d8-100a-4f9c-9cd2-2834cd349de7-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-n8cw8\" (UID: \"482a99d8-100a-4f9c-9cd2-2834cd349de7\") " pod="openstack/dnsmasq-dns-6bb4fc677f-n8cw8" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.228319 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qb5fq\" (UniqueName: \"kubernetes.io/projected/482a99d8-100a-4f9c-9cd2-2834cd349de7-kube-api-access-qb5fq\") pod \"dnsmasq-dns-6bb4fc677f-n8cw8\" (UID: \"482a99d8-100a-4f9c-9cd2-2834cd349de7\") " pod="openstack/dnsmasq-dns-6bb4fc677f-n8cw8" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.242739 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.309392 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bwn5\" (UniqueName: \"kubernetes.io/projected/d61d0161-dfcd-4700-8940-e6d1754e3e54-kube-api-access-6bwn5\") pod \"cinder-api-0\" (UID: \"d61d0161-dfcd-4700-8940-e6d1754e3e54\") " pod="openstack/cinder-api-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.309448 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d61d0161-dfcd-4700-8940-e6d1754e3e54-config-data-custom\") pod \"cinder-api-0\" (UID: \"d61d0161-dfcd-4700-8940-e6d1754e3e54\") " pod="openstack/cinder-api-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.309517 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d61d0161-dfcd-4700-8940-e6d1754e3e54-scripts\") pod \"cinder-api-0\" (UID: \"d61d0161-dfcd-4700-8940-e6d1754e3e54\") " pod="openstack/cinder-api-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.309866 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d61d0161-dfcd-4700-8940-e6d1754e3e54-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"d61d0161-dfcd-4700-8940-e6d1754e3e54\") " pod="openstack/cinder-api-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.309914 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d61d0161-dfcd-4700-8940-e6d1754e3e54-config-data\") pod \"cinder-api-0\" (UID: \"d61d0161-dfcd-4700-8940-e6d1754e3e54\") " pod="openstack/cinder-api-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.310218 
4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d61d0161-dfcd-4700-8940-e6d1754e3e54-logs\") pod \"cinder-api-0\" (UID: \"d61d0161-dfcd-4700-8940-e6d1754e3e54\") " pod="openstack/cinder-api-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.310316 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d61d0161-dfcd-4700-8940-e6d1754e3e54-etc-machine-id\") pod \"cinder-api-0\" (UID: \"d61d0161-dfcd-4700-8940-e6d1754e3e54\") " pod="openstack/cinder-api-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.413194 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bwn5\" (UniqueName: \"kubernetes.io/projected/d61d0161-dfcd-4700-8940-e6d1754e3e54-kube-api-access-6bwn5\") pod \"cinder-api-0\" (UID: \"d61d0161-dfcd-4700-8940-e6d1754e3e54\") " pod="openstack/cinder-api-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.413411 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d61d0161-dfcd-4700-8940-e6d1754e3e54-config-data-custom\") pod \"cinder-api-0\" (UID: \"d61d0161-dfcd-4700-8940-e6d1754e3e54\") " pod="openstack/cinder-api-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.413452 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d61d0161-dfcd-4700-8940-e6d1754e3e54-scripts\") pod \"cinder-api-0\" (UID: \"d61d0161-dfcd-4700-8940-e6d1754e3e54\") " pod="openstack/cinder-api-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.413487 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d61d0161-dfcd-4700-8940-e6d1754e3e54-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"d61d0161-dfcd-4700-8940-e6d1754e3e54\") " pod="openstack/cinder-api-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.413510 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d61d0161-dfcd-4700-8940-e6d1754e3e54-config-data\") pod \"cinder-api-0\" (UID: \"d61d0161-dfcd-4700-8940-e6d1754e3e54\") " pod="openstack/cinder-api-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.413574 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d61d0161-dfcd-4700-8940-e6d1754e3e54-logs\") pod \"cinder-api-0\" (UID: \"d61d0161-dfcd-4700-8940-e6d1754e3e54\") " pod="openstack/cinder-api-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.413592 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d61d0161-dfcd-4700-8940-e6d1754e3e54-etc-machine-id\") pod \"cinder-api-0\" (UID: \"d61d0161-dfcd-4700-8940-e6d1754e3e54\") " pod="openstack/cinder-api-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.413674 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d61d0161-dfcd-4700-8940-e6d1754e3e54-etc-machine-id\") pod \"cinder-api-0\" (UID: \"d61d0161-dfcd-4700-8940-e6d1754e3e54\") " pod="openstack/cinder-api-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 
05:08:37.417889 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d61d0161-dfcd-4700-8940-e6d1754e3e54-config-data-custom\") pod \"cinder-api-0\" (UID: \"d61d0161-dfcd-4700-8940-e6d1754e3e54\") " pod="openstack/cinder-api-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.418493 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d61d0161-dfcd-4700-8940-e6d1754e3e54-logs\") pod \"cinder-api-0\" (UID: \"d61d0161-dfcd-4700-8940-e6d1754e3e54\") " pod="openstack/cinder-api-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.421283 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d61d0161-dfcd-4700-8940-e6d1754e3e54-scripts\") pod \"cinder-api-0\" (UID: \"d61d0161-dfcd-4700-8940-e6d1754e3e54\") " pod="openstack/cinder-api-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.421458 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d61d0161-dfcd-4700-8940-e6d1754e3e54-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"d61d0161-dfcd-4700-8940-e6d1754e3e54\") " pod="openstack/cinder-api-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.424601 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d61d0161-dfcd-4700-8940-e6d1754e3e54-config-data\") pod \"cinder-api-0\" (UID: \"d61d0161-dfcd-4700-8940-e6d1754e3e54\") " pod="openstack/cinder-api-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.433310 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bwn5\" (UniqueName: \"kubernetes.io/projected/d61d0161-dfcd-4700-8940-e6d1754e3e54-kube-api-access-6bwn5\") pod \"cinder-api-0\" (UID: \"d61d0161-dfcd-4700-8940-e6d1754e3e54\") " pod="openstack/cinder-api-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.445645 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-nmtk4" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.482445 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-n8cw8" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.509619 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.537498 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0c69b234-aa8e-440b-b730-b901ebe0a7b1","Type":"ContainerStarted","Data":"d9c59e5c3243b58d6045dcae171ee06c82633ecf88e770bd30814b5938e504d2"} Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.538969 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.548460 4651 generic.go:334] "Generic (PLEG): container finished" podID="449ef777-d7ea-4103-be80-917dca65751f" containerID="e11ef0c5ab87a73e934163863a1bea0b8b3d8f0c49c7642b55e98262ad4852cf" exitCode=0 Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.548505 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-nmtk4" event={"ID":"449ef777-d7ea-4103-be80-917dca65751f","Type":"ContainerDied","Data":"e11ef0c5ab87a73e934163863a1bea0b8b3d8f0c49c7642b55e98262ad4852cf"} Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.548561 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-nmtk4" event={"ID":"449ef777-d7ea-4103-be80-917dca65751f","Type":"ContainerDied","Data":"43aa7d9377aa3c716312382a505a98bf04ded736d9b24bef597e095c0f213cf5"} Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.548582 4651 scope.go:117] "RemoveContainer" containerID="e11ef0c5ab87a73e934163863a1bea0b8b3d8f0c49c7642b55e98262ad4852cf" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.548802 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-nmtk4" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.575071 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.241052447 podStartE2EDuration="5.574812288s" podCreationTimestamp="2025-10-11 05:08:32 +0000 UTC" firstStartedPulling="2025-10-11 05:08:33.500669867 +0000 UTC m=+1034.396902663" lastFinishedPulling="2025-10-11 05:08:36.834429708 +0000 UTC m=+1037.730662504" observedRunningTime="2025-10-11 05:08:37.566271841 +0000 UTC m=+1038.462504637" watchObservedRunningTime="2025-10-11 05:08:37.574812288 +0000 UTC m=+1038.471045084" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.615615 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/449ef777-d7ea-4103-be80-917dca65751f-config\") pod \"449ef777-d7ea-4103-be80-917dca65751f\" (UID: \"449ef777-d7ea-4103-be80-917dca65751f\") " Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.615673 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/449ef777-d7ea-4103-be80-917dca65751f-ovsdbserver-sb\") pod \"449ef777-d7ea-4103-be80-917dca65751f\" (UID: \"449ef777-d7ea-4103-be80-917dca65751f\") " Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.615738 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/449ef777-d7ea-4103-be80-917dca65751f-ovsdbserver-nb\") pod \"449ef777-d7ea-4103-be80-917dca65751f\" (UID: \"449ef777-d7ea-4103-be80-917dca65751f\") " Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.615858 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/449ef777-d7ea-4103-be80-917dca65751f-dns-swift-storage-0\") pod \"449ef777-d7ea-4103-be80-917dca65751f\" (UID: \"449ef777-d7ea-4103-be80-917dca65751f\") " Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.615906 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tldqg\" (UniqueName: \"kubernetes.io/projected/449ef777-d7ea-4103-be80-917dca65751f-kube-api-access-tldqg\") pod \"449ef777-d7ea-4103-be80-917dca65751f\" (UID: \"449ef777-d7ea-4103-be80-917dca65751f\") " Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.615929 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/449ef777-d7ea-4103-be80-917dca65751f-dns-svc\") pod \"449ef777-d7ea-4103-be80-917dca65751f\" (UID: \"449ef777-d7ea-4103-be80-917dca65751f\") " Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.618630 4651 scope.go:117] "RemoveContainer" containerID="9d44b32769f0c427c8c603745ad60c500fbcd0ee1db183bf8b24c817e544662b" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.630976 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/449ef777-d7ea-4103-be80-917dca65751f-kube-api-access-tldqg" (OuterVolumeSpecName: "kube-api-access-tldqg") pod "449ef777-d7ea-4103-be80-917dca65751f" (UID: "449ef777-d7ea-4103-be80-917dca65751f"). InnerVolumeSpecName "kube-api-access-tldqg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.669242 4651 scope.go:117] "RemoveContainer" containerID="e11ef0c5ab87a73e934163863a1bea0b8b3d8f0c49c7642b55e98262ad4852cf" Oct 11 05:08:37 crc kubenswrapper[4651]: E1011 05:08:37.673224 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e11ef0c5ab87a73e934163863a1bea0b8b3d8f0c49c7642b55e98262ad4852cf\": container with ID starting with e11ef0c5ab87a73e934163863a1bea0b8b3d8f0c49c7642b55e98262ad4852cf not found: ID does not exist" containerID="e11ef0c5ab87a73e934163863a1bea0b8b3d8f0c49c7642b55e98262ad4852cf" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.673265 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e11ef0c5ab87a73e934163863a1bea0b8b3d8f0c49c7642b55e98262ad4852cf"} err="failed to get container status \"e11ef0c5ab87a73e934163863a1bea0b8b3d8f0c49c7642b55e98262ad4852cf\": rpc error: code = NotFound desc = could not find container \"e11ef0c5ab87a73e934163863a1bea0b8b3d8f0c49c7642b55e98262ad4852cf\": container with ID starting with e11ef0c5ab87a73e934163863a1bea0b8b3d8f0c49c7642b55e98262ad4852cf not found: ID does not exist" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.673329 4651 scope.go:117] "RemoveContainer" containerID="9d44b32769f0c427c8c603745ad60c500fbcd0ee1db183bf8b24c817e544662b" Oct 11 05:08:37 crc kubenswrapper[4651]: E1011 05:08:37.676407 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d44b32769f0c427c8c603745ad60c500fbcd0ee1db183bf8b24c817e544662b\": container with ID starting with 9d44b32769f0c427c8c603745ad60c500fbcd0ee1db183bf8b24c817e544662b not found: ID does not exist" containerID="9d44b32769f0c427c8c603745ad60c500fbcd0ee1db183bf8b24c817e544662b" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.676453 4651 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d44b32769f0c427c8c603745ad60c500fbcd0ee1db183bf8b24c817e544662b"} err="failed to get container status \"9d44b32769f0c427c8c603745ad60c500fbcd0ee1db183bf8b24c817e544662b\": rpc error: code = NotFound desc = could not find container \"9d44b32769f0c427c8c603745ad60c500fbcd0ee1db183bf8b24c817e544662b\": container with ID starting with 9d44b32769f0c427c8c603745ad60c500fbcd0ee1db183bf8b24c817e544662b not found: ID does not exist" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.697417 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/449ef777-d7ea-4103-be80-917dca65751f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "449ef777-d7ea-4103-be80-917dca65751f" (UID: "449ef777-d7ea-4103-be80-917dca65751f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.702404 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/449ef777-d7ea-4103-be80-917dca65751f-config" (OuterVolumeSpecName: "config") pod "449ef777-d7ea-4103-be80-917dca65751f" (UID: "449ef777-d7ea-4103-be80-917dca65751f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.717503 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tldqg\" (UniqueName: \"kubernetes.io/projected/449ef777-d7ea-4103-be80-917dca65751f-kube-api-access-tldqg\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.717851 4651 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/449ef777-d7ea-4103-be80-917dca65751f-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.717863 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/449ef777-d7ea-4103-be80-917dca65751f-config\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.745315 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/449ef777-d7ea-4103-be80-917dca65751f-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "449ef777-d7ea-4103-be80-917dca65751f" (UID: "449ef777-d7ea-4103-be80-917dca65751f"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.746441 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/449ef777-d7ea-4103-be80-917dca65751f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "449ef777-d7ea-4103-be80-917dca65751f" (UID: "449ef777-d7ea-4103-be80-917dca65751f"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.779276 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 11 05:08:37 crc kubenswrapper[4651]: W1011 05:08:37.785082 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2df51120_c272_4eb3_bbdf_85c2753f7640.slice/crio-c252263229b61ae1be17e471ba26fa0bc466328846e216ef35ae54957ff0d755 WatchSource:0}: Error finding container c252263229b61ae1be17e471ba26fa0bc466328846e216ef35ae54957ff0d755: Status 404 returned error can't find the container with id c252263229b61ae1be17e471ba26fa0bc466328846e216ef35ae54957ff0d755 Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.786212 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/449ef777-d7ea-4103-be80-917dca65751f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "449ef777-d7ea-4103-be80-917dca65751f" (UID: "449ef777-d7ea-4103-be80-917dca65751f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.824991 4651 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/449ef777-d7ea-4103-be80-917dca65751f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.825025 4651 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/449ef777-d7ea-4103-be80-917dca65751f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:37 crc kubenswrapper[4651]: I1011 05:08:37.825033 4651 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/449ef777-d7ea-4103-be80-917dca65751f-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.019594 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.085879 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-nmtk4"] Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.109535 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-nmtk4"] Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.116506 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-n8cw8"] Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.265659 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-75955bb549-s67lw" Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.439494 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2fm4g\" (UniqueName: \"kubernetes.io/projected/74e74db5-efd0-4198-82a1-ef76d751d1de-kube-api-access-2fm4g\") pod \"74e74db5-efd0-4198-82a1-ef76d751d1de\" (UID: \"74e74db5-efd0-4198-82a1-ef76d751d1de\") " Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.440323 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/74e74db5-efd0-4198-82a1-ef76d751d1de-config-data\") pod \"74e74db5-efd0-4198-82a1-ef76d751d1de\" (UID: \"74e74db5-efd0-4198-82a1-ef76d751d1de\") " Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.440391 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/74e74db5-efd0-4198-82a1-ef76d751d1de-horizon-secret-key\") pod \"74e74db5-efd0-4198-82a1-ef76d751d1de\" (UID: \"74e74db5-efd0-4198-82a1-ef76d751d1de\") " Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.440485 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/74e74db5-efd0-4198-82a1-ef76d751d1de-scripts\") pod \"74e74db5-efd0-4198-82a1-ef76d751d1de\" (UID: \"74e74db5-efd0-4198-82a1-ef76d751d1de\") " Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.440633 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/74e74db5-efd0-4198-82a1-ef76d751d1de-logs\") pod \"74e74db5-efd0-4198-82a1-ef76d751d1de\" (UID: \"74e74db5-efd0-4198-82a1-ef76d751d1de\") " Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.442214 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/74e74db5-efd0-4198-82a1-ef76d751d1de-logs" (OuterVolumeSpecName: "logs") pod "74e74db5-efd0-4198-82a1-ef76d751d1de" (UID: "74e74db5-efd0-4198-82a1-ef76d751d1de"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.446972 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74e74db5-efd0-4198-82a1-ef76d751d1de-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "74e74db5-efd0-4198-82a1-ef76d751d1de" (UID: "74e74db5-efd0-4198-82a1-ef76d751d1de"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.447031 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74e74db5-efd0-4198-82a1-ef76d751d1de-kube-api-access-2fm4g" (OuterVolumeSpecName: "kube-api-access-2fm4g") pod "74e74db5-efd0-4198-82a1-ef76d751d1de" (UID: "74e74db5-efd0-4198-82a1-ef76d751d1de"). InnerVolumeSpecName "kube-api-access-2fm4g". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.483928 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74e74db5-efd0-4198-82a1-ef76d751d1de-config-data" (OuterVolumeSpecName: "config-data") pod "74e74db5-efd0-4198-82a1-ef76d751d1de" (UID: "74e74db5-efd0-4198-82a1-ef76d751d1de"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.484609 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74e74db5-efd0-4198-82a1-ef76d751d1de-scripts" (OuterVolumeSpecName: "scripts") pod "74e74db5-efd0-4198-82a1-ef76d751d1de" (UID: "74e74db5-efd0-4198-82a1-ef76d751d1de"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.548208 4651 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/74e74db5-efd0-4198-82a1-ef76d751d1de-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.548239 4651 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/74e74db5-efd0-4198-82a1-ef76d751d1de-logs\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.548249 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2fm4g\" (UniqueName: \"kubernetes.io/projected/74e74db5-efd0-4198-82a1-ef76d751d1de-kube-api-access-2fm4g\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.548259 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/74e74db5-efd0-4198-82a1-ef76d751d1de-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.548267 4651 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/74e74db5-efd0-4198-82a1-ef76d751d1de-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.559180 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-68976f6bc6-9jl66" Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.567941 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"d61d0161-dfcd-4700-8940-e6d1754e3e54","Type":"ContainerStarted","Data":"30f2edc6abef8a447f214416539e21c7f9cd707d3ce1e66d82861c624c7ad7f2"} Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.569471 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"2df51120-c272-4eb3-bbdf-85c2753f7640","Type":"ContainerStarted","Data":"c252263229b61ae1be17e471ba26fa0bc466328846e216ef35ae54957ff0d755"} Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.572110 4651 generic.go:334] "Generic (PLEG): container finished" podID="74e74db5-efd0-4198-82a1-ef76d751d1de" containerID="a264ec5f5afb5d6db4164d085243e37cc4fcbe01b71b5337d59dab6d14834176" exitCode=137 Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.572136 4651 generic.go:334] "Generic (PLEG): container finished" podID="74e74db5-efd0-4198-82a1-ef76d751d1de" containerID="5d83667756f591ba7d0455576d7fbf6bc31819811cfe61dbada388b269524281" exitCode=137 Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.572182 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-75955bb549-s67lw" Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.572187 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-75955bb549-s67lw" event={"ID":"74e74db5-efd0-4198-82a1-ef76d751d1de","Type":"ContainerDied","Data":"a264ec5f5afb5d6db4164d085243e37cc4fcbe01b71b5337d59dab6d14834176"} Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.572291 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-75955bb549-s67lw" event={"ID":"74e74db5-efd0-4198-82a1-ef76d751d1de","Type":"ContainerDied","Data":"5d83667756f591ba7d0455576d7fbf6bc31819811cfe61dbada388b269524281"} Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.572309 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-75955bb549-s67lw" event={"ID":"74e74db5-efd0-4198-82a1-ef76d751d1de","Type":"ContainerDied","Data":"6eda59e5432c906cfd923da5195c9794867b3062565e7df3fd54edf56d9fdbf6"} Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.572328 4651 scope.go:117] "RemoveContainer" containerID="a264ec5f5afb5d6db4164d085243e37cc4fcbe01b71b5337d59dab6d14834176" Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.583044 4651 generic.go:334] "Generic (PLEG): container finished" podID="482a99d8-100a-4f9c-9cd2-2834cd349de7" containerID="8482bdca65317d8471325f307d0f1fc8137813a2ca1d91ee8ebc92194f7143b7" exitCode=0 Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.584343 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-n8cw8" event={"ID":"482a99d8-100a-4f9c-9cd2-2834cd349de7","Type":"ContainerDied","Data":"8482bdca65317d8471325f307d0f1fc8137813a2ca1d91ee8ebc92194f7143b7"} Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.584377 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-n8cw8" event={"ID":"482a99d8-100a-4f9c-9cd2-2834cd349de7","Type":"ContainerStarted","Data":"a9ccde7492abea2963ecbf7f53efe5f9e99b22749485d2d8b8f73b5de519e598"} Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.603846 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-79fc6b7784-n2xpf" Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.683581 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-75955bb549-s67lw"] Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.703847 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-75955bb549-s67lw"] Oct 11 05:08:38 crc kubenswrapper[4651]: I1011 05:08:38.953079 4651 scope.go:117] "RemoveContainer" containerID="5d83667756f591ba7d0455576d7fbf6bc31819811cfe61dbada388b269524281" Oct 11 05:08:39 crc kubenswrapper[4651]: I1011 05:08:39.163996 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-5f7d84485b-zb5s7" Oct 11 05:08:39 crc kubenswrapper[4651]: I1011 05:08:39.232094 4651 scope.go:117] "RemoveContainer" containerID="a264ec5f5afb5d6db4164d085243e37cc4fcbe01b71b5337d59dab6d14834176" Oct 11 05:08:39 crc kubenswrapper[4651]: E1011 05:08:39.233164 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a264ec5f5afb5d6db4164d085243e37cc4fcbe01b71b5337d59dab6d14834176\": container with ID starting with a264ec5f5afb5d6db4164d085243e37cc4fcbe01b71b5337d59dab6d14834176 not found: ID does not exist" containerID="a264ec5f5afb5d6db4164d085243e37cc4fcbe01b71b5337d59dab6d14834176" Oct 
11 05:08:39 crc kubenswrapper[4651]: I1011 05:08:39.233191 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a264ec5f5afb5d6db4164d085243e37cc4fcbe01b71b5337d59dab6d14834176"} err="failed to get container status \"a264ec5f5afb5d6db4164d085243e37cc4fcbe01b71b5337d59dab6d14834176\": rpc error: code = NotFound desc = could not find container \"a264ec5f5afb5d6db4164d085243e37cc4fcbe01b71b5337d59dab6d14834176\": container with ID starting with a264ec5f5afb5d6db4164d085243e37cc4fcbe01b71b5337d59dab6d14834176 not found: ID does not exist" Oct 11 05:08:39 crc kubenswrapper[4651]: I1011 05:08:39.233209 4651 scope.go:117] "RemoveContainer" containerID="5d83667756f591ba7d0455576d7fbf6bc31819811cfe61dbada388b269524281" Oct 11 05:08:39 crc kubenswrapper[4651]: E1011 05:08:39.233664 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d83667756f591ba7d0455576d7fbf6bc31819811cfe61dbada388b269524281\": container with ID starting with 5d83667756f591ba7d0455576d7fbf6bc31819811cfe61dbada388b269524281 not found: ID does not exist" containerID="5d83667756f591ba7d0455576d7fbf6bc31819811cfe61dbada388b269524281" Oct 11 05:08:39 crc kubenswrapper[4651]: I1011 05:08:39.233687 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d83667756f591ba7d0455576d7fbf6bc31819811cfe61dbada388b269524281"} err="failed to get container status \"5d83667756f591ba7d0455576d7fbf6bc31819811cfe61dbada388b269524281\": rpc error: code = NotFound desc = could not find container \"5d83667756f591ba7d0455576d7fbf6bc31819811cfe61dbada388b269524281\": container with ID starting with 5d83667756f591ba7d0455576d7fbf6bc31819811cfe61dbada388b269524281 not found: ID does not exist" Oct 11 05:08:39 crc kubenswrapper[4651]: I1011 05:08:39.233702 4651 scope.go:117] "RemoveContainer" containerID="a264ec5f5afb5d6db4164d085243e37cc4fcbe01b71b5337d59dab6d14834176" Oct 11 05:08:39 crc kubenswrapper[4651]: I1011 05:08:39.234094 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a264ec5f5afb5d6db4164d085243e37cc4fcbe01b71b5337d59dab6d14834176"} err="failed to get container status \"a264ec5f5afb5d6db4164d085243e37cc4fcbe01b71b5337d59dab6d14834176\": rpc error: code = NotFound desc = could not find container \"a264ec5f5afb5d6db4164d085243e37cc4fcbe01b71b5337d59dab6d14834176\": container with ID starting with a264ec5f5afb5d6db4164d085243e37cc4fcbe01b71b5337d59dab6d14834176 not found: ID does not exist" Oct 11 05:08:39 crc kubenswrapper[4651]: I1011 05:08:39.234112 4651 scope.go:117] "RemoveContainer" containerID="5d83667756f591ba7d0455576d7fbf6bc31819811cfe61dbada388b269524281" Oct 11 05:08:39 crc kubenswrapper[4651]: I1011 05:08:39.234345 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d83667756f591ba7d0455576d7fbf6bc31819811cfe61dbada388b269524281"} err="failed to get container status \"5d83667756f591ba7d0455576d7fbf6bc31819811cfe61dbada388b269524281\": rpc error: code = NotFound desc = could not find container \"5d83667756f591ba7d0455576d7fbf6bc31819811cfe61dbada388b269524281\": container with ID starting with 5d83667756f591ba7d0455576d7fbf6bc31819811cfe61dbada388b269524281 not found: ID does not exist" Oct 11 05:08:39 crc kubenswrapper[4651]: I1011 05:08:39.596547 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-n8cw8" 
event={"ID":"482a99d8-100a-4f9c-9cd2-2834cd349de7","Type":"ContainerStarted","Data":"210573dc255d42d525fbf537a18cc0e583cf0b196696ef0450da8440358b0e41"} Oct 11 05:08:39 crc kubenswrapper[4651]: I1011 05:08:39.596630 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6bb4fc677f-n8cw8" Oct 11 05:08:39 crc kubenswrapper[4651]: I1011 05:08:39.614067 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"d61d0161-dfcd-4700-8940-e6d1754e3e54","Type":"ContainerStarted","Data":"f50c12d145d160a363b32a9c565044a8163b01106340ffb65b7f9d8f8eac6356"} Oct 11 05:08:39 crc kubenswrapper[4651]: I1011 05:08:39.626641 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6bb4fc677f-n8cw8" podStartSLOduration=3.626625359 podStartE2EDuration="3.626625359s" podCreationTimestamp="2025-10-11 05:08:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:08:39.622280788 +0000 UTC m=+1040.518513594" watchObservedRunningTime="2025-10-11 05:08:39.626625359 +0000 UTC m=+1040.522858155" Oct 11 05:08:39 crc kubenswrapper[4651]: I1011 05:08:39.690308 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 11 05:08:39 crc kubenswrapper[4651]: I1011 05:08:39.903056 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="449ef777-d7ea-4103-be80-917dca65751f" path="/var/lib/kubelet/pods/449ef777-d7ea-4103-be80-917dca65751f/volumes" Oct 11 05:08:39 crc kubenswrapper[4651]: I1011 05:08:39.905707 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74e74db5-efd0-4198-82a1-ef76d751d1de" path="/var/lib/kubelet/pods/74e74db5-efd0-4198-82a1-ef76d751d1de/volumes" Oct 11 05:08:40 crc kubenswrapper[4651]: I1011 05:08:40.239233 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6f5cf48bd-n6csr" Oct 11 05:08:40 crc kubenswrapper[4651]: I1011 05:08:40.398232 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6f5cf48bd-n6csr" Oct 11 05:08:40 crc kubenswrapper[4651]: I1011 05:08:40.683564 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"2df51120-c272-4eb3-bbdf-85c2753f7640","Type":"ContainerStarted","Data":"7cc1b454f8974566f2d64eb7fbae7b6423341070930fb72c9ca263ff19af5649"} Oct 11 05:08:40 crc kubenswrapper[4651]: I1011 05:08:40.703065 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="d61d0161-dfcd-4700-8940-e6d1754e3e54" containerName="cinder-api-log" containerID="cri-o://f50c12d145d160a363b32a9c565044a8163b01106340ffb65b7f9d8f8eac6356" gracePeriod=30 Oct 11 05:08:40 crc kubenswrapper[4651]: I1011 05:08:40.703401 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="d61d0161-dfcd-4700-8940-e6d1754e3e54" containerName="cinder-api" containerID="cri-o://0e14ef895a7b6579cc4e4c3e6064b844f7a2f754f135a8520a18a6edef571227" gracePeriod=30 Oct 11 05:08:40 crc kubenswrapper[4651]: I1011 05:08:40.703855 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"d61d0161-dfcd-4700-8940-e6d1754e3e54","Type":"ContainerStarted","Data":"0e14ef895a7b6579cc4e4c3e6064b844f7a2f754f135a8520a18a6edef571227"} Oct 11 05:08:40 crc kubenswrapper[4651]: I1011 
05:08:40.703927 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 11 05:08:40 crc kubenswrapper[4651]: I1011 05:08:40.728714 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.728698982 podStartE2EDuration="3.728698982s" podCreationTimestamp="2025-10-11 05:08:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:08:40.728138188 +0000 UTC m=+1041.624370984" watchObservedRunningTime="2025-10-11 05:08:40.728698982 +0000 UTC m=+1041.624931778" Oct 11 05:08:40 crc kubenswrapper[4651]: I1011 05:08:40.851226 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-68976f6bc6-9jl66" Oct 11 05:08:40 crc kubenswrapper[4651]: I1011 05:08:40.993213 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-6856c774b5-fq9r6" Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.082792 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-79fc6b7784-n2xpf"] Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.083522 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-79fc6b7784-n2xpf" podUID="84763f01-3ff4-49ae-a364-e54b62308ff0" containerName="neutron-api" containerID="cri-o://ece7b2fef8409b80f241001509dfa5bc31212c2afe506cd86038c7f07a958996" gracePeriod=30 Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.083858 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-79fc6b7784-n2xpf" podUID="84763f01-3ff4-49ae-a364-e54b62308ff0" containerName="neutron-httpd" containerID="cri-o://2588ea3b17498f8c9979d7197450c91ed307a1b5ddb07ef32f2476c9b05053af" gracePeriod=30 Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.496180 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.624136 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6bwn5\" (UniqueName: \"kubernetes.io/projected/d61d0161-dfcd-4700-8940-e6d1754e3e54-kube-api-access-6bwn5\") pod \"d61d0161-dfcd-4700-8940-e6d1754e3e54\" (UID: \"d61d0161-dfcd-4700-8940-e6d1754e3e54\") " Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.624190 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d61d0161-dfcd-4700-8940-e6d1754e3e54-combined-ca-bundle\") pod \"d61d0161-dfcd-4700-8940-e6d1754e3e54\" (UID: \"d61d0161-dfcd-4700-8940-e6d1754e3e54\") " Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.624228 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d61d0161-dfcd-4700-8940-e6d1754e3e54-config-data\") pod \"d61d0161-dfcd-4700-8940-e6d1754e3e54\" (UID: \"d61d0161-dfcd-4700-8940-e6d1754e3e54\") " Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.624286 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d61d0161-dfcd-4700-8940-e6d1754e3e54-etc-machine-id\") pod \"d61d0161-dfcd-4700-8940-e6d1754e3e54\" (UID: \"d61d0161-dfcd-4700-8940-e6d1754e3e54\") " Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.624325 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d61d0161-dfcd-4700-8940-e6d1754e3e54-config-data-custom\") pod \"d61d0161-dfcd-4700-8940-e6d1754e3e54\" (UID: \"d61d0161-dfcd-4700-8940-e6d1754e3e54\") " Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.624423 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d61d0161-dfcd-4700-8940-e6d1754e3e54-scripts\") pod \"d61d0161-dfcd-4700-8940-e6d1754e3e54\" (UID: \"d61d0161-dfcd-4700-8940-e6d1754e3e54\") " Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.624468 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d61d0161-dfcd-4700-8940-e6d1754e3e54-logs\") pod \"d61d0161-dfcd-4700-8940-e6d1754e3e54\" (UID: \"d61d0161-dfcd-4700-8940-e6d1754e3e54\") " Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.625276 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d61d0161-dfcd-4700-8940-e6d1754e3e54-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "d61d0161-dfcd-4700-8940-e6d1754e3e54" (UID: "d61d0161-dfcd-4700-8940-e6d1754e3e54"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.626308 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d61d0161-dfcd-4700-8940-e6d1754e3e54-logs" (OuterVolumeSpecName: "logs") pod "d61d0161-dfcd-4700-8940-e6d1754e3e54" (UID: "d61d0161-dfcd-4700-8940-e6d1754e3e54"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.642002 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d61d0161-dfcd-4700-8940-e6d1754e3e54-scripts" (OuterVolumeSpecName: "scripts") pod "d61d0161-dfcd-4700-8940-e6d1754e3e54" (UID: "d61d0161-dfcd-4700-8940-e6d1754e3e54"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.677025 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d61d0161-dfcd-4700-8940-e6d1754e3e54-kube-api-access-6bwn5" (OuterVolumeSpecName: "kube-api-access-6bwn5") pod "d61d0161-dfcd-4700-8940-e6d1754e3e54" (UID: "d61d0161-dfcd-4700-8940-e6d1754e3e54"). InnerVolumeSpecName "kube-api-access-6bwn5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.684281 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d61d0161-dfcd-4700-8940-e6d1754e3e54-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "d61d0161-dfcd-4700-8940-e6d1754e3e54" (UID: "d61d0161-dfcd-4700-8940-e6d1754e3e54"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.692914 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d61d0161-dfcd-4700-8940-e6d1754e3e54-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d61d0161-dfcd-4700-8940-e6d1754e3e54" (UID: "d61d0161-dfcd-4700-8940-e6d1754e3e54"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.725059 4651 generic.go:334] "Generic (PLEG): container finished" podID="d61d0161-dfcd-4700-8940-e6d1754e3e54" containerID="0e14ef895a7b6579cc4e4c3e6064b844f7a2f754f135a8520a18a6edef571227" exitCode=0 Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.725093 4651 generic.go:334] "Generic (PLEG): container finished" podID="d61d0161-dfcd-4700-8940-e6d1754e3e54" containerID="f50c12d145d160a363b32a9c565044a8163b01106340ffb65b7f9d8f8eac6356" exitCode=143 Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.725147 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"d61d0161-dfcd-4700-8940-e6d1754e3e54","Type":"ContainerDied","Data":"0e14ef895a7b6579cc4e4c3e6064b844f7a2f754f135a8520a18a6edef571227"} Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.725174 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"d61d0161-dfcd-4700-8940-e6d1754e3e54","Type":"ContainerDied","Data":"f50c12d145d160a363b32a9c565044a8163b01106340ffb65b7f9d8f8eac6356"} Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.725186 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"d61d0161-dfcd-4700-8940-e6d1754e3e54","Type":"ContainerDied","Data":"30f2edc6abef8a447f214416539e21c7f9cd707d3ce1e66d82861c624c7ad7f2"} Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.725201 4651 scope.go:117] "RemoveContainer" containerID="0e14ef895a7b6579cc4e4c3e6064b844f7a2f754f135a8520a18a6edef571227" Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.725325 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.728072 4651 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d61d0161-dfcd-4700-8940-e6d1754e3e54-logs\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.728110 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6bwn5\" (UniqueName: \"kubernetes.io/projected/d61d0161-dfcd-4700-8940-e6d1754e3e54-kube-api-access-6bwn5\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.728120 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d61d0161-dfcd-4700-8940-e6d1754e3e54-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.728131 4651 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d61d0161-dfcd-4700-8940-e6d1754e3e54-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.728141 4651 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d61d0161-dfcd-4700-8940-e6d1754e3e54-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.728149 4651 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d61d0161-dfcd-4700-8940-e6d1754e3e54-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.742723 4651 generic.go:334] "Generic (PLEG): container finished" podID="84763f01-3ff4-49ae-a364-e54b62308ff0" containerID="2588ea3b17498f8c9979d7197450c91ed307a1b5ddb07ef32f2476c9b05053af" exitCode=0 Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.742804 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-79fc6b7784-n2xpf" event={"ID":"84763f01-3ff4-49ae-a364-e54b62308ff0","Type":"ContainerDied","Data":"2588ea3b17498f8c9979d7197450c91ed307a1b5ddb07ef32f2476c9b05053af"} Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.755969 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d61d0161-dfcd-4700-8940-e6d1754e3e54-config-data" (OuterVolumeSpecName: "config-data") pod "d61d0161-dfcd-4700-8940-e6d1754e3e54" (UID: "d61d0161-dfcd-4700-8940-e6d1754e3e54"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.759051 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-5f7d84485b-zb5s7" Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.760281 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"2df51120-c272-4eb3-bbdf-85c2753f7640","Type":"ContainerStarted","Data":"2a85a10f3f7bab9e0694a732c911b4b71473716433fe3be528287dd0c0df4eef"} Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.801021 4651 scope.go:117] "RemoveContainer" containerID="f50c12d145d160a363b32a9c565044a8163b01106340ffb65b7f9d8f8eac6356" Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.866150 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d61d0161-dfcd-4700-8940-e6d1754e3e54-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.885631 4651 scope.go:117] "RemoveContainer" containerID="0e14ef895a7b6579cc4e4c3e6064b844f7a2f754f135a8520a18a6edef571227" Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.894746 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.653592702 podStartE2EDuration="5.894721824s" podCreationTimestamp="2025-10-11 05:08:36 +0000 UTC" firstStartedPulling="2025-10-11 05:08:37.792926409 +0000 UTC m=+1038.689159205" lastFinishedPulling="2025-10-11 05:08:39.034055531 +0000 UTC m=+1039.930288327" observedRunningTime="2025-10-11 05:08:41.833308711 +0000 UTC m=+1042.729541517" watchObservedRunningTime="2025-10-11 05:08:41.894721824 +0000 UTC m=+1042.790954640" Oct 11 05:08:41 crc kubenswrapper[4651]: E1011 05:08:41.900958 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e14ef895a7b6579cc4e4c3e6064b844f7a2f754f135a8520a18a6edef571227\": container with ID starting with 0e14ef895a7b6579cc4e4c3e6064b844f7a2f754f135a8520a18a6edef571227 not found: ID does not exist" containerID="0e14ef895a7b6579cc4e4c3e6064b844f7a2f754f135a8520a18a6edef571227" Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.901018 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e14ef895a7b6579cc4e4c3e6064b844f7a2f754f135a8520a18a6edef571227"} err="failed to get container status \"0e14ef895a7b6579cc4e4c3e6064b844f7a2f754f135a8520a18a6edef571227\": rpc error: code = NotFound desc = could not find container \"0e14ef895a7b6579cc4e4c3e6064b844f7a2f754f135a8520a18a6edef571227\": container with ID starting with 0e14ef895a7b6579cc4e4c3e6064b844f7a2f754f135a8520a18a6edef571227 not found: ID does not exist" Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.901044 4651 scope.go:117] "RemoveContainer" containerID="f50c12d145d160a363b32a9c565044a8163b01106340ffb65b7f9d8f8eac6356" Oct 11 05:08:41 crc kubenswrapper[4651]: E1011 05:08:41.903812 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f50c12d145d160a363b32a9c565044a8163b01106340ffb65b7f9d8f8eac6356\": container with ID starting with f50c12d145d160a363b32a9c565044a8163b01106340ffb65b7f9d8f8eac6356 not found: ID does not exist" containerID="f50c12d145d160a363b32a9c565044a8163b01106340ffb65b7f9d8f8eac6356" Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.903868 4651 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f50c12d145d160a363b32a9c565044a8163b01106340ffb65b7f9d8f8eac6356"} err="failed to get container status \"f50c12d145d160a363b32a9c565044a8163b01106340ffb65b7f9d8f8eac6356\": rpc error: code = NotFound desc = could not find container \"f50c12d145d160a363b32a9c565044a8163b01106340ffb65b7f9d8f8eac6356\": container with ID starting with f50c12d145d160a363b32a9c565044a8163b01106340ffb65b7f9d8f8eac6356 not found: ID does not exist" Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.903883 4651 scope.go:117] "RemoveContainer" containerID="0e14ef895a7b6579cc4e4c3e6064b844f7a2f754f135a8520a18a6edef571227" Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.914127 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e14ef895a7b6579cc4e4c3e6064b844f7a2f754f135a8520a18a6edef571227"} err="failed to get container status \"0e14ef895a7b6579cc4e4c3e6064b844f7a2f754f135a8520a18a6edef571227\": rpc error: code = NotFound desc = could not find container \"0e14ef895a7b6579cc4e4c3e6064b844f7a2f754f135a8520a18a6edef571227\": container with ID starting with 0e14ef895a7b6579cc4e4c3e6064b844f7a2f754f135a8520a18a6edef571227 not found: ID does not exist" Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.914154 4651 scope.go:117] "RemoveContainer" containerID="f50c12d145d160a363b32a9c565044a8163b01106340ffb65b7f9d8f8eac6356" Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.916710 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f50c12d145d160a363b32a9c565044a8163b01106340ffb65b7f9d8f8eac6356"} err="failed to get container status \"f50c12d145d160a363b32a9c565044a8163b01106340ffb65b7f9d8f8eac6356\": rpc error: code = NotFound desc = could not find container \"f50c12d145d160a363b32a9c565044a8163b01106340ffb65b7f9d8f8eac6356\": container with ID starting with f50c12d145d160a363b32a9c565044a8163b01106340ffb65b7f9d8f8eac6356 not found: ID does not exist" Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.952962 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-68976f6bc6-9jl66"] Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.953232 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-68976f6bc6-9jl66" podUID="93e6a5fd-218d-44c2-9bf9-2796b1ff0c33" containerName="horizon-log" containerID="cri-o://321c90e912a2b24f9c5662f28a92c8cc11f803ff3e2b862071cbf01111dbe4be" gracePeriod=30 Oct 11 05:08:41 crc kubenswrapper[4651]: I1011 05:08:41.957322 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-68976f6bc6-9jl66" podUID="93e6a5fd-218d-44c2-9bf9-2796b1ff0c33" containerName="horizon" containerID="cri-o://e808ba8643e65bfa96c6d2b0dba9c582de46d8fb985cecf451cd1bb241335b5b" gracePeriod=30 Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.102236 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.123041 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.139215 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 11 05:08:42 crc kubenswrapper[4651]: E1011 05:08:42.139567 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d61d0161-dfcd-4700-8940-e6d1754e3e54" 
containerName="cinder-api-log" Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.139582 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="d61d0161-dfcd-4700-8940-e6d1754e3e54" containerName="cinder-api-log" Oct 11 05:08:42 crc kubenswrapper[4651]: E1011 05:08:42.139599 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d61d0161-dfcd-4700-8940-e6d1754e3e54" containerName="cinder-api" Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.139606 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="d61d0161-dfcd-4700-8940-e6d1754e3e54" containerName="cinder-api" Oct 11 05:08:42 crc kubenswrapper[4651]: E1011 05:08:42.139617 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74e74db5-efd0-4198-82a1-ef76d751d1de" containerName="horizon-log" Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.139623 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="74e74db5-efd0-4198-82a1-ef76d751d1de" containerName="horizon-log" Oct 11 05:08:42 crc kubenswrapper[4651]: E1011 05:08:42.139634 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="449ef777-d7ea-4103-be80-917dca65751f" containerName="dnsmasq-dns" Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.139639 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="449ef777-d7ea-4103-be80-917dca65751f" containerName="dnsmasq-dns" Oct 11 05:08:42 crc kubenswrapper[4651]: E1011 05:08:42.139654 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="449ef777-d7ea-4103-be80-917dca65751f" containerName="init" Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.139660 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="449ef777-d7ea-4103-be80-917dca65751f" containerName="init" Oct 11 05:08:42 crc kubenswrapper[4651]: E1011 05:08:42.139676 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74e74db5-efd0-4198-82a1-ef76d751d1de" containerName="horizon" Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.139681 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="74e74db5-efd0-4198-82a1-ef76d751d1de" containerName="horizon" Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.139879 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="d61d0161-dfcd-4700-8940-e6d1754e3e54" containerName="cinder-api" Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.139892 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="d61d0161-dfcd-4700-8940-e6d1754e3e54" containerName="cinder-api-log" Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.139903 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="74e74db5-efd0-4198-82a1-ef76d751d1de" containerName="horizon-log" Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.139917 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="449ef777-d7ea-4103-be80-917dca65751f" containerName="dnsmasq-dns" Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.139929 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="74e74db5-efd0-4198-82a1-ef76d751d1de" containerName="horizon" Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.140774 4651 util.go:30] "No sandbox for pod can be found. 
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.144563 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data"
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.144770 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc"
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.144910 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc"
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.150110 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.244258 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0"
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.273484 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1-logs\") pod \"cinder-api-0\" (UID: \"6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1\") " pod="openstack/cinder-api-0"
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.273954 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1-public-tls-certs\") pod \"cinder-api-0\" (UID: \"6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1\") " pod="openstack/cinder-api-0"
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.274207 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1\") " pod="openstack/cinder-api-0"
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.274255 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1-config-data\") pod \"cinder-api-0\" (UID: \"6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1\") " pod="openstack/cinder-api-0"
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.274305 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1-scripts\") pod \"cinder-api-0\" (UID: \"6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1\") " pod="openstack/cinder-api-0"
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.274341 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1-config-data-custom\") pod \"cinder-api-0\" (UID: \"6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1\") " pod="openstack/cinder-api-0"
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.274471 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfkvd\" (UniqueName: \"kubernetes.io/projected/6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1-kube-api-access-kfkvd\") pod \"cinder-api-0\" (UID: \"6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1\") " pod="openstack/cinder-api-0"
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.274505 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1\") " pod="openstack/cinder-api-0"
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.274527 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1-etc-machine-id\") pod \"cinder-api-0\" (UID: \"6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1\") " pod="openstack/cinder-api-0"
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.376509 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1\") " pod="openstack/cinder-api-0"
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.376546 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1-config-data\") pod \"cinder-api-0\" (UID: \"6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1\") " pod="openstack/cinder-api-0"
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.376574 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1-scripts\") pod \"cinder-api-0\" (UID: \"6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1\") " pod="openstack/cinder-api-0"
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.376595 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1-config-data-custom\") pod \"cinder-api-0\" (UID: \"6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1\") " pod="openstack/cinder-api-0"
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.376649 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfkvd\" (UniqueName: \"kubernetes.io/projected/6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1-kube-api-access-kfkvd\") pod \"cinder-api-0\" (UID: \"6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1\") " pod="openstack/cinder-api-0"
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.376671 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1\") " pod="openstack/cinder-api-0"
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.376686 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1-etc-machine-id\") pod \"cinder-api-0\" (UID: \"6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1\") " pod="openstack/cinder-api-0"
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.376704 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1-logs\") pod \"cinder-api-0\" (UID: \"6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1\") " pod="openstack/cinder-api-0"
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.376719 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1-public-tls-certs\") pod \"cinder-api-0\" (UID: \"6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1\") " pod="openstack/cinder-api-0"
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.376864 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1-etc-machine-id\") pod \"cinder-api-0\" (UID: \"6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1\") " pod="openstack/cinder-api-0"
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.377361 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1-logs\") pod \"cinder-api-0\" (UID: \"6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1\") " pod="openstack/cinder-api-0"
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.380790 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1-scripts\") pod \"cinder-api-0\" (UID: \"6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1\") " pod="openstack/cinder-api-0"
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.382329 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1-config-data\") pod \"cinder-api-0\" (UID: \"6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1\") " pod="openstack/cinder-api-0"
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.383257 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1-public-tls-certs\") pod \"cinder-api-0\" (UID: \"6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1\") " pod="openstack/cinder-api-0"
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.387149 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1\") " pod="openstack/cinder-api-0"
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.395374 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1-config-data-custom\") pod \"cinder-api-0\" (UID: \"6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1\") " pod="openstack/cinder-api-0"
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.395691 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1\") " pod="openstack/cinder-api-0"
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.436487 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfkvd\" (UniqueName: \"kubernetes.io/projected/6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1-kube-api-access-kfkvd\") pod \"cinder-api-0\" (UID: \"6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1\") " pod="openstack/cinder-api-0"
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.480065 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.963165 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-694f8cb944-jlqsz"
Oct 11 05:08:42 crc kubenswrapper[4651]: I1011 05:08:42.970727 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Oct 11 05:08:42 crc kubenswrapper[4651]: W1011 05:08:42.978918 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b53ec0e_8ba3_47a6_9715_84fe1ca28ff1.slice/crio-98a9b72f3a31697d3962e8714017eaec26897ee92eea505b82300cec88b5a631 WatchSource:0}: Error finding container 98a9b72f3a31697d3962e8714017eaec26897ee92eea505b82300cec88b5a631: Status 404 returned error can't find the container with id 98a9b72f3a31697d3962e8714017eaec26897ee92eea505b82300cec88b5a631
Oct 11 05:08:43 crc kubenswrapper[4651]: I1011 05:08:43.257027 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Oct 11 05:08:43 crc kubenswrapper[4651]: I1011 05:08:43.610637 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-694f8cb944-jlqsz"
Oct 11 05:08:43 crc kubenswrapper[4651]: I1011 05:08:43.687138 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6f5cf48bd-n6csr"]
Oct 11 05:08:43 crc kubenswrapper[4651]: I1011 05:08:43.687358 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6f5cf48bd-n6csr" podUID="9b7de539-a960-4684-89a8-80cf59a5616f" containerName="barbican-api-log" containerID="cri-o://62abaffe16727724b3354df55308d8a64f2ef7df9fd9d81d3a846f41193d3d43" gracePeriod=30
Oct 11 05:08:43 crc kubenswrapper[4651]: I1011 05:08:43.687805 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6f5cf48bd-n6csr" podUID="9b7de539-a960-4684-89a8-80cf59a5616f" containerName="barbican-api" containerID="cri-o://ad5cd4c6b13e2dc39cd4c72bd325768513f721c6738addc8bcf4d1bc4b56d18e" gracePeriod=30
Oct 11 05:08:43 crc kubenswrapper[4651]: I1011 05:08:43.817996 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1","Type":"ContainerStarted","Data":"ad4e0dce7fb0e1010811f502f4cae8d7c7f2ebff1e58ae8782408c9af027dc0c"}
Oct 11 05:08:43 crc kubenswrapper[4651]: I1011 05:08:43.818336 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1","Type":"ContainerStarted","Data":"98a9b72f3a31697d3962e8714017eaec26897ee92eea505b82300cec88b5a631"}
Oct 11 05:08:43 crc kubenswrapper[4651]: I1011 05:08:43.898534 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d61d0161-dfcd-4700-8940-e6d1754e3e54" path="/var/lib/kubelet/pods/d61d0161-dfcd-4700-8940-e6d1754e3e54/volumes"
Oct 11 05:08:44 crc kubenswrapper[4651]: I1011 05:08:44.829567 4651 generic.go:334] "Generic (PLEG): container finished" podID="9b7de539-a960-4684-89a8-80cf59a5616f" containerID="62abaffe16727724b3354df55308d8a64f2ef7df9fd9d81d3a846f41193d3d43" exitCode=143
Oct 11 05:08:44 crc kubenswrapper[4651]: I1011 05:08:44.829662 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6f5cf48bd-n6csr" event={"ID":"9b7de539-a960-4684-89a8-80cf59a5616f","Type":"ContainerDied","Data":"62abaffe16727724b3354df55308d8a64f2ef7df9fd9d81d3a846f41193d3d43"}
Oct 11 05:08:44 crc kubenswrapper[4651]: I1011 05:08:44.831922 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1","Type":"ContainerStarted","Data":"7254d69277705edc908461631bf1473ca44ddba77055ccdb15cb10a45d4f1af7"}
Oct 11 05:08:44 crc kubenswrapper[4651]: I1011 05:08:44.833422 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0"
Oct 11 05:08:44 crc kubenswrapper[4651]: I1011 05:08:44.855371 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=2.85533954 podStartE2EDuration="2.85533954s" podCreationTimestamp="2025-10-11 05:08:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:08:44.851670587 +0000 UTC m=+1045.747903393" watchObservedRunningTime="2025-10-11 05:08:44.85533954 +0000 UTC m=+1045.751572336"
Oct 11 05:08:45 crc kubenswrapper[4651]: I1011 05:08:45.844410 4651 generic.go:334] "Generic (PLEG): container finished" podID="93e6a5fd-218d-44c2-9bf9-2796b1ff0c33" containerID="e808ba8643e65bfa96c6d2b0dba9c582de46d8fb985cecf451cd1bb241335b5b" exitCode=0
Oct 11 05:08:45 crc kubenswrapper[4651]: I1011 05:08:45.844489 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-68976f6bc6-9jl66" event={"ID":"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33","Type":"ContainerDied","Data":"e808ba8643e65bfa96c6d2b0dba9c582de46d8fb985cecf451cd1bb241335b5b"}
Oct 11 05:08:46 crc kubenswrapper[4651]: I1011 05:08:46.194318 4651 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-68976f6bc6-9jl66" podUID="93e6a5fd-218d-44c2-9bf9-2796b1ff0c33" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.150:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.150:8443: connect: connection refused"
Oct 11 05:08:46 crc kubenswrapper[4651]: I1011 05:08:46.617134 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-79fc6b7784-n2xpf"
Oct 11 05:08:46 crc kubenswrapper[4651]: I1011 05:08:46.760991 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/84763f01-3ff4-49ae-a364-e54b62308ff0-ovndb-tls-certs\") pod \"84763f01-3ff4-49ae-a364-e54b62308ff0\" (UID: \"84763f01-3ff4-49ae-a364-e54b62308ff0\") "
Oct 11 05:08:46 crc kubenswrapper[4651]: I1011 05:08:46.761209 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/84763f01-3ff4-49ae-a364-e54b62308ff0-config\") pod \"84763f01-3ff4-49ae-a364-e54b62308ff0\" (UID: \"84763f01-3ff4-49ae-a364-e54b62308ff0\") "
Oct 11 05:08:46 crc kubenswrapper[4651]: I1011 05:08:46.761272 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/84763f01-3ff4-49ae-a364-e54b62308ff0-httpd-config\") pod \"84763f01-3ff4-49ae-a364-e54b62308ff0\" (UID: \"84763f01-3ff4-49ae-a364-e54b62308ff0\") "
Oct 11 05:08:46 crc kubenswrapper[4651]: I1011 05:08:46.761415 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84763f01-3ff4-49ae-a364-e54b62308ff0-combined-ca-bundle\") pod \"84763f01-3ff4-49ae-a364-e54b62308ff0\" (UID: \"84763f01-3ff4-49ae-a364-e54b62308ff0\") "
Oct 11 05:08:46 crc kubenswrapper[4651]: I1011 05:08:46.761475 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-75ct4\" (UniqueName: \"kubernetes.io/projected/84763f01-3ff4-49ae-a364-e54b62308ff0-kube-api-access-75ct4\") pod \"84763f01-3ff4-49ae-a364-e54b62308ff0\" (UID: \"84763f01-3ff4-49ae-a364-e54b62308ff0\") "
Oct 11 05:08:46 crc kubenswrapper[4651]: I1011 05:08:46.767835 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84763f01-3ff4-49ae-a364-e54b62308ff0-kube-api-access-75ct4" (OuterVolumeSpecName: "kube-api-access-75ct4") pod "84763f01-3ff4-49ae-a364-e54b62308ff0" (UID: "84763f01-3ff4-49ae-a364-e54b62308ff0"). InnerVolumeSpecName "kube-api-access-75ct4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:08:46 crc kubenswrapper[4651]: I1011 05:08:46.773976 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84763f01-3ff4-49ae-a364-e54b62308ff0-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "84763f01-3ff4-49ae-a364-e54b62308ff0" (UID: "84763f01-3ff4-49ae-a364-e54b62308ff0"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:08:46 crc kubenswrapper[4651]: I1011 05:08:46.823322 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84763f01-3ff4-49ae-a364-e54b62308ff0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "84763f01-3ff4-49ae-a364-e54b62308ff0" (UID: "84763f01-3ff4-49ae-a364-e54b62308ff0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:08:46 crc kubenswrapper[4651]: I1011 05:08:46.840768 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84763f01-3ff4-49ae-a364-e54b62308ff0-config" (OuterVolumeSpecName: "config") pod "84763f01-3ff4-49ae-a364-e54b62308ff0" (UID: "84763f01-3ff4-49ae-a364-e54b62308ff0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:08:46 crc kubenswrapper[4651]: I1011 05:08:46.862642 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84763f01-3ff4-49ae-a364-e54b62308ff0-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "84763f01-3ff4-49ae-a364-e54b62308ff0" (UID: "84763f01-3ff4-49ae-a364-e54b62308ff0"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:08:46 crc kubenswrapper[4651]: I1011 05:08:46.865647 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/84763f01-3ff4-49ae-a364-e54b62308ff0-config\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:46 crc kubenswrapper[4651]: I1011 05:08:46.865806 4651 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/84763f01-3ff4-49ae-a364-e54b62308ff0-httpd-config\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:46 crc kubenswrapper[4651]: I1011 05:08:46.865896 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84763f01-3ff4-49ae-a364-e54b62308ff0-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:46 crc kubenswrapper[4651]: I1011 05:08:46.865985 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-75ct4\" (UniqueName: \"kubernetes.io/projected/84763f01-3ff4-49ae-a364-e54b62308ff0-kube-api-access-75ct4\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:46 crc kubenswrapper[4651]: I1011 05:08:46.866082 4651 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/84763f01-3ff4-49ae-a364-e54b62308ff0-ovndb-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:46 crc kubenswrapper[4651]: I1011 05:08:46.869302 4651 generic.go:334] "Generic (PLEG): container finished" podID="84763f01-3ff4-49ae-a364-e54b62308ff0" containerID="ece7b2fef8409b80f241001509dfa5bc31212c2afe506cd86038c7f07a958996" exitCode=0
Oct 11 05:08:46 crc kubenswrapper[4651]: I1011 05:08:46.869381 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-79fc6b7784-n2xpf"
Oct 11 05:08:46 crc kubenswrapper[4651]: I1011 05:08:46.872869 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-79fc6b7784-n2xpf" event={"ID":"84763f01-3ff4-49ae-a364-e54b62308ff0","Type":"ContainerDied","Data":"ece7b2fef8409b80f241001509dfa5bc31212c2afe506cd86038c7f07a958996"}
Oct 11 05:08:46 crc kubenswrapper[4651]: I1011 05:08:46.872904 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-79fc6b7784-n2xpf" event={"ID":"84763f01-3ff4-49ae-a364-e54b62308ff0","Type":"ContainerDied","Data":"8b8c2de02d00e20dbafdc012e91c04ae2fc9629f92c9dd88462c168c33af4a90"}
Oct 11 05:08:46 crc kubenswrapper[4651]: I1011 05:08:46.872926 4651 scope.go:117] "RemoveContainer" containerID="2588ea3b17498f8c9979d7197450c91ed307a1b5ddb07ef32f2476c9b05053af"
Oct 11 05:08:46 crc kubenswrapper[4651]: I1011 05:08:46.983641 4651 scope.go:117] "RemoveContainer" containerID="ece7b2fef8409b80f241001509dfa5bc31212c2afe506cd86038c7f07a958996"
Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.036954 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-79fc6b7784-n2xpf"]
Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.064021 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-79fc6b7784-n2xpf"]
Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.072589 4651 scope.go:117] "RemoveContainer" containerID="2588ea3b17498f8c9979d7197450c91ed307a1b5ddb07ef32f2476c9b05053af"
Oct 11 05:08:47 crc kubenswrapper[4651]: E1011 05:08:47.073201 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2588ea3b17498f8c9979d7197450c91ed307a1b5ddb07ef32f2476c9b05053af\": container with ID starting with 2588ea3b17498f8c9979d7197450c91ed307a1b5ddb07ef32f2476c9b05053af not found: ID does not exist" containerID="2588ea3b17498f8c9979d7197450c91ed307a1b5ddb07ef32f2476c9b05053af"
Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.073235 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2588ea3b17498f8c9979d7197450c91ed307a1b5ddb07ef32f2476c9b05053af"} err="failed to get container status \"2588ea3b17498f8c9979d7197450c91ed307a1b5ddb07ef32f2476c9b05053af\": rpc error: code = NotFound desc = could not find container \"2588ea3b17498f8c9979d7197450c91ed307a1b5ddb07ef32f2476c9b05053af\": container with ID starting with 2588ea3b17498f8c9979d7197450c91ed307a1b5ddb07ef32f2476c9b05053af not found: ID does not exist"
Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.073258 4651 scope.go:117] "RemoveContainer" containerID="ece7b2fef8409b80f241001509dfa5bc31212c2afe506cd86038c7f07a958996"
Oct 11 05:08:47 crc kubenswrapper[4651]: E1011 05:08:47.074506 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ece7b2fef8409b80f241001509dfa5bc31212c2afe506cd86038c7f07a958996\": container with ID starting with ece7b2fef8409b80f241001509dfa5bc31212c2afe506cd86038c7f07a958996 not found: ID does not exist" containerID="ece7b2fef8409b80f241001509dfa5bc31212c2afe506cd86038c7f07a958996"
Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.074541 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ece7b2fef8409b80f241001509dfa5bc31212c2afe506cd86038c7f07a958996"} err="failed to get container status \"ece7b2fef8409b80f241001509dfa5bc31212c2afe506cd86038c7f07a958996\": rpc error: code = NotFound desc = could not find container \"ece7b2fef8409b80f241001509dfa5bc31212c2afe506cd86038c7f07a958996\": container with ID starting with ece7b2fef8409b80f241001509dfa5bc31212c2afe506cd86038c7f07a958996 not found: ID does not exist"
Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.230424 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6f5cf48bd-n6csr"
Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.374982 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-89hff\" (UniqueName: \"kubernetes.io/projected/9b7de539-a960-4684-89a8-80cf59a5616f-kube-api-access-89hff\") pod \"9b7de539-a960-4684-89a8-80cf59a5616f\" (UID: \"9b7de539-a960-4684-89a8-80cf59a5616f\") "
Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.375069 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9b7de539-a960-4684-89a8-80cf59a5616f-logs\") pod \"9b7de539-a960-4684-89a8-80cf59a5616f\" (UID: \"9b7de539-a960-4684-89a8-80cf59a5616f\") "
Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.375519 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b7de539-a960-4684-89a8-80cf59a5616f-logs" (OuterVolumeSpecName: "logs") pod "9b7de539-a960-4684-89a8-80cf59a5616f" (UID: "9b7de539-a960-4684-89a8-80cf59a5616f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.375597 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b7de539-a960-4684-89a8-80cf59a5616f-combined-ca-bundle\") pod \"9b7de539-a960-4684-89a8-80cf59a5616f\" (UID: \"9b7de539-a960-4684-89a8-80cf59a5616f\") "
Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.375884 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b7de539-a960-4684-89a8-80cf59a5616f-config-data\") pod \"9b7de539-a960-4684-89a8-80cf59a5616f\" (UID: \"9b7de539-a960-4684-89a8-80cf59a5616f\") "
Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.375938 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9b7de539-a960-4684-89a8-80cf59a5616f-config-data-custom\") pod \"9b7de539-a960-4684-89a8-80cf59a5616f\" (UID: \"9b7de539-a960-4684-89a8-80cf59a5616f\") "
Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.376327 4651 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9b7de539-a960-4684-89a8-80cf59a5616f-logs\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.381242 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b7de539-a960-4684-89a8-80cf59a5616f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "9b7de539-a960-4684-89a8-80cf59a5616f" (UID: "9b7de539-a960-4684-89a8-80cf59a5616f"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.384309 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b7de539-a960-4684-89a8-80cf59a5616f-kube-api-access-89hff" (OuterVolumeSpecName: "kube-api-access-89hff") pod "9b7de539-a960-4684-89a8-80cf59a5616f" (UID: "9b7de539-a960-4684-89a8-80cf59a5616f"). InnerVolumeSpecName "kube-api-access-89hff". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.414231 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b7de539-a960-4684-89a8-80cf59a5616f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9b7de539-a960-4684-89a8-80cf59a5616f" (UID: "9b7de539-a960-4684-89a8-80cf59a5616f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.446586 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b7de539-a960-4684-89a8-80cf59a5616f-config-data" (OuterVolumeSpecName: "config-data") pod "9b7de539-a960-4684-89a8-80cf59a5616f" (UID: "9b7de539-a960-4684-89a8-80cf59a5616f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.469039 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.477167 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b7de539-a960-4684-89a8-80cf59a5616f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.477195 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b7de539-a960-4684-89a8-80cf59a5616f-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.477205 4651 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9b7de539-a960-4684-89a8-80cf59a5616f-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.477213 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-89hff\" (UniqueName: \"kubernetes.io/projected/9b7de539-a960-4684-89a8-80cf59a5616f-kube-api-access-89hff\") on node \"crc\" DevicePath \"\"" Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.483965 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6bb4fc677f-n8cw8" Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.577597 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.598804 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-9pngt"] Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.599071 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5ccc5c4795-9pngt" podUID="65b0df64-2ebb-4c09-813e-1be5beb4e8ed" containerName="dnsmasq-dns" containerID="cri-o://68a8a6f2bea783b912ab6323eae94718961e555c71ee717def8ada8bfac784c1" gracePeriod=10 Oct 11 05:08:47 crc kubenswrapper[4651]: 
I1011 05:08:47.885104 4651 generic.go:334] "Generic (PLEG): container finished" podID="9b7de539-a960-4684-89a8-80cf59a5616f" containerID="ad5cd4c6b13e2dc39cd4c72bd325768513f721c6738addc8bcf4d1bc4b56d18e" exitCode=0 Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.885560 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6f5cf48bd-n6csr" Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.887398 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84763f01-3ff4-49ae-a364-e54b62308ff0" path="/var/lib/kubelet/pods/84763f01-3ff4-49ae-a364-e54b62308ff0/volumes" Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.889175 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6f5cf48bd-n6csr" event={"ID":"9b7de539-a960-4684-89a8-80cf59a5616f","Type":"ContainerDied","Data":"ad5cd4c6b13e2dc39cd4c72bd325768513f721c6738addc8bcf4d1bc4b56d18e"} Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.889398 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6f5cf48bd-n6csr" event={"ID":"9b7de539-a960-4684-89a8-80cf59a5616f","Type":"ContainerDied","Data":"6e2179fae02f6190cc3e457973e99af7d5ae8a43d9d4c4a5a6f242ed4b8a279f"} Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.889435 4651 scope.go:117] "RemoveContainer" containerID="ad5cd4c6b13e2dc39cd4c72bd325768513f721c6738addc8bcf4d1bc4b56d18e" Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.910036 4651 generic.go:334] "Generic (PLEG): container finished" podID="65b0df64-2ebb-4c09-813e-1be5beb4e8ed" containerID="68a8a6f2bea783b912ab6323eae94718961e555c71ee717def8ada8bfac784c1" exitCode=0 Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.910133 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc5c4795-9pngt" event={"ID":"65b0df64-2ebb-4c09-813e-1be5beb4e8ed","Type":"ContainerDied","Data":"68a8a6f2bea783b912ab6323eae94718961e555c71ee717def8ada8bfac784c1"} Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.910286 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="2df51120-c272-4eb3-bbdf-85c2753f7640" containerName="cinder-scheduler" containerID="cri-o://7cc1b454f8974566f2d64eb7fbae7b6423341070930fb72c9ca263ff19af5649" gracePeriod=30 Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.910375 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="2df51120-c272-4eb3-bbdf-85c2753f7640" containerName="probe" containerID="cri-o://2a85a10f3f7bab9e0694a732c911b4b71473716433fe3be528287dd0c0df4eef" gracePeriod=30 Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.953280 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6f5cf48bd-n6csr"] Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.960611 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-6f5cf48bd-n6csr"] Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.963240 4651 scope.go:117] "RemoveContainer" containerID="62abaffe16727724b3354df55308d8a64f2ef7df9fd9d81d3a846f41193d3d43" Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.992129 4651 scope.go:117] "RemoveContainer" containerID="ad5cd4c6b13e2dc39cd4c72bd325768513f721c6738addc8bcf4d1bc4b56d18e" Oct 11 05:08:47 crc kubenswrapper[4651]: E1011 05:08:47.995533 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code 
Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.995566 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad5cd4c6b13e2dc39cd4c72bd325768513f721c6738addc8bcf4d1bc4b56d18e"} err="failed to get container status \"ad5cd4c6b13e2dc39cd4c72bd325768513f721c6738addc8bcf4d1bc4b56d18e\": rpc error: code = NotFound desc = could not find container \"ad5cd4c6b13e2dc39cd4c72bd325768513f721c6738addc8bcf4d1bc4b56d18e\": container with ID starting with ad5cd4c6b13e2dc39cd4c72bd325768513f721c6738addc8bcf4d1bc4b56d18e not found: ID does not exist"
Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.995586 4651 scope.go:117] "RemoveContainer" containerID="62abaffe16727724b3354df55308d8a64f2ef7df9fd9d81d3a846f41193d3d43"
Oct 11 05:08:47 crc kubenswrapper[4651]: E1011 05:08:47.996075 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62abaffe16727724b3354df55308d8a64f2ef7df9fd9d81d3a846f41193d3d43\": container with ID starting with 62abaffe16727724b3354df55308d8a64f2ef7df9fd9d81d3a846f41193d3d43 not found: ID does not exist" containerID="62abaffe16727724b3354df55308d8a64f2ef7df9fd9d81d3a846f41193d3d43"
Oct 11 05:08:47 crc kubenswrapper[4651]: I1011 05:08:47.996098 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62abaffe16727724b3354df55308d8a64f2ef7df9fd9d81d3a846f41193d3d43"} err="failed to get container status \"62abaffe16727724b3354df55308d8a64f2ef7df9fd9d81d3a846f41193d3d43\": rpc error: code = NotFound desc = could not find container \"62abaffe16727724b3354df55308d8a64f2ef7df9fd9d81d3a846f41193d3d43\": container with ID starting with 62abaffe16727724b3354df55308d8a64f2ef7df9fd9d81d3a846f41193d3d43 not found: ID does not exist"
Oct 11 05:08:48 crc kubenswrapper[4651]: I1011 05:08:48.121812 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc5c4795-9pngt"
Oct 11 05:08:48 crc kubenswrapper[4651]: I1011 05:08:48.189765 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-ovsdbserver-sb\") pod \"65b0df64-2ebb-4c09-813e-1be5beb4e8ed\" (UID: \"65b0df64-2ebb-4c09-813e-1be5beb4e8ed\") "
Oct 11 05:08:48 crc kubenswrapper[4651]: I1011 05:08:48.189831 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-config\") pod \"65b0df64-2ebb-4c09-813e-1be5beb4e8ed\" (UID: \"65b0df64-2ebb-4c09-813e-1be5beb4e8ed\") "
Oct 11 05:08:48 crc kubenswrapper[4651]: I1011 05:08:48.189905 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c9ktv\" (UniqueName: \"kubernetes.io/projected/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-kube-api-access-c9ktv\") pod \"65b0df64-2ebb-4c09-813e-1be5beb4e8ed\" (UID: \"65b0df64-2ebb-4c09-813e-1be5beb4e8ed\") "
Oct 11 05:08:48 crc kubenswrapper[4651]: I1011 05:08:48.189941 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-ovsdbserver-nb\") pod \"65b0df64-2ebb-4c09-813e-1be5beb4e8ed\" (UID: \"65b0df64-2ebb-4c09-813e-1be5beb4e8ed\") "
Oct 11 05:08:48 crc kubenswrapper[4651]: I1011 05:08:48.189984 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-dns-svc\") pod \"65b0df64-2ebb-4c09-813e-1be5beb4e8ed\" (UID: \"65b0df64-2ebb-4c09-813e-1be5beb4e8ed\") "
Oct 11 05:08:48 crc kubenswrapper[4651]: I1011 05:08:48.190011 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-dns-swift-storage-0\") pod \"65b0df64-2ebb-4c09-813e-1be5beb4e8ed\" (UID: \"65b0df64-2ebb-4c09-813e-1be5beb4e8ed\") "
Oct 11 05:08:48 crc kubenswrapper[4651]: I1011 05:08:48.199336 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-kube-api-access-c9ktv" (OuterVolumeSpecName: "kube-api-access-c9ktv") pod "65b0df64-2ebb-4c09-813e-1be5beb4e8ed" (UID: "65b0df64-2ebb-4c09-813e-1be5beb4e8ed"). InnerVolumeSpecName "kube-api-access-c9ktv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:08:48 crc kubenswrapper[4651]: I1011 05:08:48.245560 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "65b0df64-2ebb-4c09-813e-1be5beb4e8ed" (UID: "65b0df64-2ebb-4c09-813e-1be5beb4e8ed"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:08:48 crc kubenswrapper[4651]: I1011 05:08:48.245593 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "65b0df64-2ebb-4c09-813e-1be5beb4e8ed" (UID: "65b0df64-2ebb-4c09-813e-1be5beb4e8ed"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:08:48 crc kubenswrapper[4651]: I1011 05:08:48.260132 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-config" (OuterVolumeSpecName: "config") pod "65b0df64-2ebb-4c09-813e-1be5beb4e8ed" (UID: "65b0df64-2ebb-4c09-813e-1be5beb4e8ed"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:08:48 crc kubenswrapper[4651]: I1011 05:08:48.267614 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "65b0df64-2ebb-4c09-813e-1be5beb4e8ed" (UID: "65b0df64-2ebb-4c09-813e-1be5beb4e8ed"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:08:48 crc kubenswrapper[4651]: I1011 05:08:48.271249 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "65b0df64-2ebb-4c09-813e-1be5beb4e8ed" (UID: "65b0df64-2ebb-4c09-813e-1be5beb4e8ed"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:08:48 crc kubenswrapper[4651]: I1011 05:08:48.292348 4651 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:48 crc kubenswrapper[4651]: I1011 05:08:48.292703 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-config\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:48 crc kubenswrapper[4651]: I1011 05:08:48.292789 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c9ktv\" (UniqueName: \"kubernetes.io/projected/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-kube-api-access-c9ktv\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:48 crc kubenswrapper[4651]: I1011 05:08:48.292865 4651 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:48 crc kubenswrapper[4651]: I1011 05:08:48.293021 4651 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:48 crc kubenswrapper[4651]: I1011 05:08:48.293146 4651 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/65b0df64-2ebb-4c09-813e-1be5beb4e8ed-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:48 crc kubenswrapper[4651]: I1011 05:08:48.920303 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc5c4795-9pngt" event={"ID":"65b0df64-2ebb-4c09-813e-1be5beb4e8ed","Type":"ContainerDied","Data":"6d7e49238432ea32813c6a28eb51edf1d4b7772c9c76eb20e3e1f2e95f6aaa45"}
Oct 11 05:08:48 crc kubenswrapper[4651]: I1011 05:08:48.920585 4651 scope.go:117] "RemoveContainer" containerID="68a8a6f2bea783b912ab6323eae94718961e555c71ee717def8ada8bfac784c1"
Oct 11 05:08:48 crc kubenswrapper[4651]: I1011 05:08:48.920702 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc5c4795-9pngt"
Oct 11 05:08:48 crc kubenswrapper[4651]: I1011 05:08:48.926551 4651 generic.go:334] "Generic (PLEG): container finished" podID="2df51120-c272-4eb3-bbdf-85c2753f7640" containerID="2a85a10f3f7bab9e0694a732c911b4b71473716433fe3be528287dd0c0df4eef" exitCode=0
Oct 11 05:08:48 crc kubenswrapper[4651]: I1011 05:08:48.926659 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"2df51120-c272-4eb3-bbdf-85c2753f7640","Type":"ContainerDied","Data":"2a85a10f3f7bab9e0694a732c911b4b71473716433fe3be528287dd0c0df4eef"}
Oct 11 05:08:48 crc kubenswrapper[4651]: I1011 05:08:48.972171 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-9pngt"]
Oct 11 05:08:48 crc kubenswrapper[4651]: I1011 05:08:48.977557 4651 scope.go:117] "RemoveContainer" containerID="0de6bffe549422a38a213f3c9875e678a358eb25b7786bed61f425e999bfe54d"
Oct 11 05:08:48 crc kubenswrapper[4651]: I1011 05:08:48.978513 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-9pngt"]
Oct 11 05:08:49 crc kubenswrapper[4651]: I1011 05:08:49.884942 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65b0df64-2ebb-4c09-813e-1be5beb4e8ed" path="/var/lib/kubelet/pods/65b0df64-2ebb-4c09-813e-1be5beb4e8ed/volumes"
Oct 11 05:08:49 crc kubenswrapper[4651]: I1011 05:08:49.885950 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b7de539-a960-4684-89a8-80cf59a5616f" path="/var/lib/kubelet/pods/9b7de539-a960-4684-89a8-80cf59a5616f/volumes"
Oct 11 05:08:51 crc kubenswrapper[4651]: I1011 05:08:51.513325 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-5dbc5c6b84-lhwcw"
Oct 11 05:08:51 crc kubenswrapper[4651]: I1011 05:08:51.977658 4651 generic.go:334] "Generic (PLEG): container finished" podID="2df51120-c272-4eb3-bbdf-85c2753f7640" containerID="7cc1b454f8974566f2d64eb7fbae7b6423341070930fb72c9ca263ff19af5649" exitCode=0
Oct 11 05:08:51 crc kubenswrapper[4651]: I1011 05:08:51.977719 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"2df51120-c272-4eb3-bbdf-85c2753f7640","Type":"ContainerDied","Data":"7cc1b454f8974566f2d64eb7fbae7b6423341070930fb72c9ca263ff19af5649"}
Oct 11 05:08:52 crc kubenswrapper[4651]: I1011 05:08:52.171754 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Oct 11 05:08:52 crc kubenswrapper[4651]: I1011 05:08:52.262714 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2df51120-c272-4eb3-bbdf-85c2753f7640-config-data-custom\") pod \"2df51120-c272-4eb3-bbdf-85c2753f7640\" (UID: \"2df51120-c272-4eb3-bbdf-85c2753f7640\") "
Oct 11 05:08:52 crc kubenswrapper[4651]: I1011 05:08:52.262843 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7czwc\" (UniqueName: \"kubernetes.io/projected/2df51120-c272-4eb3-bbdf-85c2753f7640-kube-api-access-7czwc\") pod \"2df51120-c272-4eb3-bbdf-85c2753f7640\" (UID: \"2df51120-c272-4eb3-bbdf-85c2753f7640\") "
Oct 11 05:08:52 crc kubenswrapper[4651]: I1011 05:08:52.262883 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2df51120-c272-4eb3-bbdf-85c2753f7640-combined-ca-bundle\") pod \"2df51120-c272-4eb3-bbdf-85c2753f7640\" (UID: \"2df51120-c272-4eb3-bbdf-85c2753f7640\") "
Oct 11 05:08:52 crc kubenswrapper[4651]: I1011 05:08:52.262908 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2df51120-c272-4eb3-bbdf-85c2753f7640-etc-machine-id\") pod \"2df51120-c272-4eb3-bbdf-85c2753f7640\" (UID: \"2df51120-c272-4eb3-bbdf-85c2753f7640\") "
Oct 11 05:08:52 crc kubenswrapper[4651]: I1011 05:08:52.262945 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2df51120-c272-4eb3-bbdf-85c2753f7640-scripts\") pod \"2df51120-c272-4eb3-bbdf-85c2753f7640\" (UID: \"2df51120-c272-4eb3-bbdf-85c2753f7640\") "
Oct 11 05:08:52 crc kubenswrapper[4651]: I1011 05:08:52.263049 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2df51120-c272-4eb3-bbdf-85c2753f7640-config-data\") pod \"2df51120-c272-4eb3-bbdf-85c2753f7640\" (UID: \"2df51120-c272-4eb3-bbdf-85c2753f7640\") "
Oct 11 05:08:52 crc kubenswrapper[4651]: I1011 05:08:52.269944 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2df51120-c272-4eb3-bbdf-85c2753f7640-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "2df51120-c272-4eb3-bbdf-85c2753f7640" (UID: "2df51120-c272-4eb3-bbdf-85c2753f7640"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 11 05:08:52 crc kubenswrapper[4651]: I1011 05:08:52.272026 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2df51120-c272-4eb3-bbdf-85c2753f7640-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "2df51120-c272-4eb3-bbdf-85c2753f7640" (UID: "2df51120-c272-4eb3-bbdf-85c2753f7640"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:08:52 crc kubenswrapper[4651]: I1011 05:08:52.273195 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2df51120-c272-4eb3-bbdf-85c2753f7640-scripts" (OuterVolumeSpecName: "scripts") pod "2df51120-c272-4eb3-bbdf-85c2753f7640" (UID: "2df51120-c272-4eb3-bbdf-85c2753f7640"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:08:52 crc kubenswrapper[4651]: I1011 05:08:52.275064 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2df51120-c272-4eb3-bbdf-85c2753f7640-kube-api-access-7czwc" (OuterVolumeSpecName: "kube-api-access-7czwc") pod "2df51120-c272-4eb3-bbdf-85c2753f7640" (UID: "2df51120-c272-4eb3-bbdf-85c2753f7640"). InnerVolumeSpecName "kube-api-access-7czwc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:08:52 crc kubenswrapper[4651]: I1011 05:08:52.329506 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2df51120-c272-4eb3-bbdf-85c2753f7640-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2df51120-c272-4eb3-bbdf-85c2753f7640" (UID: "2df51120-c272-4eb3-bbdf-85c2753f7640"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:08:52 crc kubenswrapper[4651]: I1011 05:08:52.362225 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2df51120-c272-4eb3-bbdf-85c2753f7640-config-data" (OuterVolumeSpecName: "config-data") pod "2df51120-c272-4eb3-bbdf-85c2753f7640" (UID: "2df51120-c272-4eb3-bbdf-85c2753f7640"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:08:52 crc kubenswrapper[4651]: I1011 05:08:52.365430 4651 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2df51120-c272-4eb3-bbdf-85c2753f7640-config-data-custom\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:52 crc kubenswrapper[4651]: I1011 05:08:52.365455 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7czwc\" (UniqueName: \"kubernetes.io/projected/2df51120-c272-4eb3-bbdf-85c2753f7640-kube-api-access-7czwc\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:52 crc kubenswrapper[4651]: I1011 05:08:52.365467 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2df51120-c272-4eb3-bbdf-85c2753f7640-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:52 crc kubenswrapper[4651]: I1011 05:08:52.365477 4651 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2df51120-c272-4eb3-bbdf-85c2753f7640-etc-machine-id\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:52 crc kubenswrapper[4651]: I1011 05:08:52.365487 4651 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2df51120-c272-4eb3-bbdf-85c2753f7640-scripts\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:52 crc kubenswrapper[4651]: I1011 05:08:52.365496 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2df51120-c272-4eb3-bbdf-85c2753f7640-config-data\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:52 crc kubenswrapper[4651]: I1011 05:08:52.989517 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"2df51120-c272-4eb3-bbdf-85c2753f7640","Type":"ContainerDied","Data":"c252263229b61ae1be17e471ba26fa0bc466328846e216ef35ae54957ff0d755"}
Oct 11 05:08:52 crc kubenswrapper[4651]: I1011 05:08:52.991670 4651 scope.go:117] "RemoveContainer" containerID="2a85a10f3f7bab9e0694a732c911b4b71473716433fe3be528287dd0c0df4eef"
Oct 11 05:08:52 crc kubenswrapper[4651]: I1011 05:08:52.991974 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.014930 4651 scope.go:117] "RemoveContainer" containerID="7cc1b454f8974566f2d64eb7fbae7b6423341070930fb72c9ca263ff19af5649" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.030440 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.043770 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.062783 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 11 05:08:53 crc kubenswrapper[4651]: E1011 05:08:53.063347 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65b0df64-2ebb-4c09-813e-1be5beb4e8ed" containerName="dnsmasq-dns" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.063441 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="65b0df64-2ebb-4c09-813e-1be5beb4e8ed" containerName="dnsmasq-dns" Oct 11 05:08:53 crc kubenswrapper[4651]: E1011 05:08:53.063503 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84763f01-3ff4-49ae-a364-e54b62308ff0" containerName="neutron-httpd" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.063552 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="84763f01-3ff4-49ae-a364-e54b62308ff0" containerName="neutron-httpd" Oct 11 05:08:53 crc kubenswrapper[4651]: E1011 05:08:53.063622 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2df51120-c272-4eb3-bbdf-85c2753f7640" containerName="cinder-scheduler" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.063696 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="2df51120-c272-4eb3-bbdf-85c2753f7640" containerName="cinder-scheduler" Oct 11 05:08:53 crc kubenswrapper[4651]: E1011 05:08:53.063777 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65b0df64-2ebb-4c09-813e-1be5beb4e8ed" containerName="init" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.063895 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="65b0df64-2ebb-4c09-813e-1be5beb4e8ed" containerName="init" Oct 11 05:08:53 crc kubenswrapper[4651]: E1011 05:08:53.063957 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b7de539-a960-4684-89a8-80cf59a5616f" containerName="barbican-api" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.064016 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b7de539-a960-4684-89a8-80cf59a5616f" containerName="barbican-api" Oct 11 05:08:53 crc kubenswrapper[4651]: E1011 05:08:53.064073 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84763f01-3ff4-49ae-a364-e54b62308ff0" containerName="neutron-api" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.064122 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="84763f01-3ff4-49ae-a364-e54b62308ff0" containerName="neutron-api" Oct 11 05:08:53 crc kubenswrapper[4651]: E1011 05:08:53.064181 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2df51120-c272-4eb3-bbdf-85c2753f7640" containerName="probe" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.064250 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="2df51120-c272-4eb3-bbdf-85c2753f7640" containerName="probe" Oct 11 05:08:53 crc kubenswrapper[4651]: E1011 05:08:53.064325 4651 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="9b7de539-a960-4684-89a8-80cf59a5616f" containerName="barbican-api-log" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.064387 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b7de539-a960-4684-89a8-80cf59a5616f" containerName="barbican-api-log" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.064585 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="84763f01-3ff4-49ae-a364-e54b62308ff0" containerName="neutron-httpd" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.065174 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="2df51120-c272-4eb3-bbdf-85c2753f7640" containerName="probe" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.065321 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="2df51120-c272-4eb3-bbdf-85c2753f7640" containerName="cinder-scheduler" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.065385 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="84763f01-3ff4-49ae-a364-e54b62308ff0" containerName="neutron-api" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.065448 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="65b0df64-2ebb-4c09-813e-1be5beb4e8ed" containerName="dnsmasq-dns" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.065509 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b7de539-a960-4684-89a8-80cf59a5616f" containerName="barbican-api" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.065571 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b7de539-a960-4684-89a8-80cf59a5616f" containerName="barbican-api-log" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.067039 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.070549 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.078415 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.179759 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bee9214-1b63-4ef6-81ca-507ef630559b-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9bee9214-1b63-4ef6-81ca-507ef630559b\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.179861 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9bee9214-1b63-4ef6-81ca-507ef630559b-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9bee9214-1b63-4ef6-81ca-507ef630559b\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.179901 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bee9214-1b63-4ef6-81ca-507ef630559b-config-data\") pod \"cinder-scheduler-0\" (UID: \"9bee9214-1b63-4ef6-81ca-507ef630559b\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.179945 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9bee9214-1b63-4ef6-81ca-507ef630559b-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9bee9214-1b63-4ef6-81ca-507ef630559b\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.179977 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bee9214-1b63-4ef6-81ca-507ef630559b-scripts\") pod \"cinder-scheduler-0\" (UID: \"9bee9214-1b63-4ef6-81ca-507ef630559b\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.180023 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z5nhn\" (UniqueName: \"kubernetes.io/projected/9bee9214-1b63-4ef6-81ca-507ef630559b-kube-api-access-z5nhn\") pod \"cinder-scheduler-0\" (UID: \"9bee9214-1b63-4ef6-81ca-507ef630559b\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.281579 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9bee9214-1b63-4ef6-81ca-507ef630559b-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9bee9214-1b63-4ef6-81ca-507ef630559b\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.281756 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bee9214-1b63-4ef6-81ca-507ef630559b-scripts\") pod \"cinder-scheduler-0\" (UID: \"9bee9214-1b63-4ef6-81ca-507ef630559b\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.281911 4651 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-z5nhn\" (UniqueName: \"kubernetes.io/projected/9bee9214-1b63-4ef6-81ca-507ef630559b-kube-api-access-z5nhn\") pod \"cinder-scheduler-0\" (UID: \"9bee9214-1b63-4ef6-81ca-507ef630559b\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.282192 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bee9214-1b63-4ef6-81ca-507ef630559b-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9bee9214-1b63-4ef6-81ca-507ef630559b\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.282314 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9bee9214-1b63-4ef6-81ca-507ef630559b-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9bee9214-1b63-4ef6-81ca-507ef630559b\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.282386 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bee9214-1b63-4ef6-81ca-507ef630559b-config-data\") pod \"cinder-scheduler-0\" (UID: \"9bee9214-1b63-4ef6-81ca-507ef630559b\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.283896 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9bee9214-1b63-4ef6-81ca-507ef630559b-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9bee9214-1b63-4ef6-81ca-507ef630559b\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.287192 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bee9214-1b63-4ef6-81ca-507ef630559b-scripts\") pod \"cinder-scheduler-0\" (UID: \"9bee9214-1b63-4ef6-81ca-507ef630559b\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.287224 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9bee9214-1b63-4ef6-81ca-507ef630559b-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9bee9214-1b63-4ef6-81ca-507ef630559b\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.290496 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bee9214-1b63-4ef6-81ca-507ef630559b-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9bee9214-1b63-4ef6-81ca-507ef630559b\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.292095 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bee9214-1b63-4ef6-81ca-507ef630559b-config-data\") pod \"cinder-scheduler-0\" (UID: \"9bee9214-1b63-4ef6-81ca-507ef630559b\") " pod="openstack/cinder-scheduler-0" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.307777 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z5nhn\" (UniqueName: \"kubernetes.io/projected/9bee9214-1b63-4ef6-81ca-507ef630559b-kube-api-access-z5nhn\") pod \"cinder-scheduler-0\" (UID: \"9bee9214-1b63-4ef6-81ca-507ef630559b\") " 
pod="openstack/cinder-scheduler-0" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.383388 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.885541 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2df51120-c272-4eb3-bbdf-85c2753f7640" path="/var/lib/kubelet/pods/2df51120-c272-4eb3-bbdf-85c2753f7640/volumes" Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.900794 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 11 05:08:53 crc kubenswrapper[4651]: I1011 05:08:53.999740 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9bee9214-1b63-4ef6-81ca-507ef630559b","Type":"ContainerStarted","Data":"65cdf1b1922b6dd698ed04d31077ca48dabf99bd52841cfc08d05d87ca102675"} Oct 11 05:08:54 crc kubenswrapper[4651]: I1011 05:08:54.414148 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Oct 11 05:08:55 crc kubenswrapper[4651]: I1011 05:08:55.015127 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9bee9214-1b63-4ef6-81ca-507ef630559b","Type":"ContainerStarted","Data":"c145b1fcc5faa4454da68cb50ce07df9b87e7b3ee17783e1cd1086ff3d82dca6"} Oct 11 05:08:55 crc kubenswrapper[4651]: I1011 05:08:55.544522 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Oct 11 05:08:55 crc kubenswrapper[4651]: I1011 05:08:55.545890 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 11 05:08:55 crc kubenswrapper[4651]: I1011 05:08:55.547864 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Oct 11 05:08:55 crc kubenswrapper[4651]: I1011 05:08:55.548159 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-5bfqx" Oct 11 05:08:55 crc kubenswrapper[4651]: I1011 05:08:55.548298 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Oct 11 05:08:55 crc kubenswrapper[4651]: I1011 05:08:55.574682 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 11 05:08:55 crc kubenswrapper[4651]: I1011 05:08:55.628155 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5436303b-9926-42ee-82ba-e091535d3630-openstack-config\") pod \"openstackclient\" (UID: \"5436303b-9926-42ee-82ba-e091535d3630\") " pod="openstack/openstackclient" Oct 11 05:08:55 crc kubenswrapper[4651]: I1011 05:08:55.628193 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kx6gg\" (UniqueName: \"kubernetes.io/projected/5436303b-9926-42ee-82ba-e091535d3630-kube-api-access-kx6gg\") pod \"openstackclient\" (UID: \"5436303b-9926-42ee-82ba-e091535d3630\") " pod="openstack/openstackclient" Oct 11 05:08:55 crc kubenswrapper[4651]: I1011 05:08:55.628219 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5436303b-9926-42ee-82ba-e091535d3630-combined-ca-bundle\") pod \"openstackclient\" (UID: \"5436303b-9926-42ee-82ba-e091535d3630\") " pod="openstack/openstackclient" 
Oct 11 05:08:55 crc kubenswrapper[4651]: I1011 05:08:55.628382 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5436303b-9926-42ee-82ba-e091535d3630-openstack-config-secret\") pod \"openstackclient\" (UID: \"5436303b-9926-42ee-82ba-e091535d3630\") " pod="openstack/openstackclient"
Oct 11 05:08:55 crc kubenswrapper[4651]: I1011 05:08:55.730178 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5436303b-9926-42ee-82ba-e091535d3630-openstack-config-secret\") pod \"openstackclient\" (UID: \"5436303b-9926-42ee-82ba-e091535d3630\") " pod="openstack/openstackclient"
Oct 11 05:08:55 crc kubenswrapper[4651]: I1011 05:08:55.730565 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5436303b-9926-42ee-82ba-e091535d3630-openstack-config\") pod \"openstackclient\" (UID: \"5436303b-9926-42ee-82ba-e091535d3630\") " pod="openstack/openstackclient"
Oct 11 05:08:55 crc kubenswrapper[4651]: I1011 05:08:55.730658 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kx6gg\" (UniqueName: \"kubernetes.io/projected/5436303b-9926-42ee-82ba-e091535d3630-kube-api-access-kx6gg\") pod \"openstackclient\" (UID: \"5436303b-9926-42ee-82ba-e091535d3630\") " pod="openstack/openstackclient"
Oct 11 05:08:55 crc kubenswrapper[4651]: I1011 05:08:55.730749 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5436303b-9926-42ee-82ba-e091535d3630-combined-ca-bundle\") pod \"openstackclient\" (UID: \"5436303b-9926-42ee-82ba-e091535d3630\") " pod="openstack/openstackclient"
Oct 11 05:08:55 crc kubenswrapper[4651]: I1011 05:08:55.731612 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5436303b-9926-42ee-82ba-e091535d3630-openstack-config\") pod \"openstackclient\" (UID: \"5436303b-9926-42ee-82ba-e091535d3630\") " pod="openstack/openstackclient"
Oct 11 05:08:55 crc kubenswrapper[4651]: I1011 05:08:55.739303 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5436303b-9926-42ee-82ba-e091535d3630-openstack-config-secret\") pod \"openstackclient\" (UID: \"5436303b-9926-42ee-82ba-e091535d3630\") " pod="openstack/openstackclient"
Oct 11 05:08:55 crc kubenswrapper[4651]: I1011 05:08:55.739443 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5436303b-9926-42ee-82ba-e091535d3630-combined-ca-bundle\") pod \"openstackclient\" (UID: \"5436303b-9926-42ee-82ba-e091535d3630\") " pod="openstack/openstackclient"
Oct 11 05:08:55 crc kubenswrapper[4651]: I1011 05:08:55.780537 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kx6gg\" (UniqueName: \"kubernetes.io/projected/5436303b-9926-42ee-82ba-e091535d3630-kube-api-access-kx6gg\") pod \"openstackclient\" (UID: \"5436303b-9926-42ee-82ba-e091535d3630\") " pod="openstack/openstackclient"
Oct 11 05:08:55 crc kubenswrapper[4651]: I1011 05:08:55.781801 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"]
Oct 11 05:08:55 crc kubenswrapper[4651]: I1011 05:08:55.783365 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Oct 11 05:08:55 crc kubenswrapper[4651]: I1011 05:08:55.803233 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"]
Oct 11 05:08:55 crc kubenswrapper[4651]: I1011 05:08:55.831873 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"]
Oct 11 05:08:55 crc kubenswrapper[4651]: I1011 05:08:55.834545 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Oct 11 05:08:55 crc kubenswrapper[4651]: I1011 05:08:55.851228 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Oct 11 05:08:55 crc kubenswrapper[4651]: E1011 05:08:55.926551 4651 log.go:32] "RunPodSandbox from runtime service failed" err=<
Oct 11 05:08:55 crc kubenswrapper[4651]: 	rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_5436303b-9926-42ee-82ba-e091535d3630_0(59c297712f3abd072b1cf5d00a614ef338fbf8dd239843ff4866c1d279d0c708): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"59c297712f3abd072b1cf5d00a614ef338fbf8dd239843ff4866c1d279d0c708" Netns:"/var/run/netns/9ddfc4ac-e52b-4440-82e6-bf9eab9c6990" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=59c297712f3abd072b1cf5d00a614ef338fbf8dd239843ff4866c1d279d0c708;K8S_POD_UID=5436303b-9926-42ee-82ba-e091535d3630" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/5436303b-9926-42ee-82ba-e091535d3630]: expected pod UID "5436303b-9926-42ee-82ba-e091535d3630" but got "47aacf69-f6eb-4e85-9b70-8f241bfa812f" from Kube API
Oct 11 05:08:55 crc kubenswrapper[4651]: 	': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"}
Oct 11 05:08:55 crc kubenswrapper[4651]: 	>
Oct 11 05:08:55 crc kubenswrapper[4651]: E1011 05:08:55.926641 4651 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=<
Oct 11 05:08:55 crc kubenswrapper[4651]: 	rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_5436303b-9926-42ee-82ba-e091535d3630_0(59c297712f3abd072b1cf5d00a614ef338fbf8dd239843ff4866c1d279d0c708): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"59c297712f3abd072b1cf5d00a614ef338fbf8dd239843ff4866c1d279d0c708" Netns:"/var/run/netns/9ddfc4ac-e52b-4440-82e6-bf9eab9c6990" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=59c297712f3abd072b1cf5d00a614ef338fbf8dd239843ff4866c1d279d0c708;K8S_POD_UID=5436303b-9926-42ee-82ba-e091535d3630" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/5436303b-9926-42ee-82ba-e091535d3630]: expected pod UID "5436303b-9926-42ee-82ba-e091535d3630" but got "47aacf69-f6eb-4e85-9b70-8f241bfa812f" from Kube API
Oct 11 05:08:55 crc kubenswrapper[4651]: 	': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"}
Oct 11 05:08:55 crc kubenswrapper[4651]: 	> pod="openstack/openstackclient"
Oct 11 05:08:55 crc kubenswrapper[4651]: I1011 05:08:55.933199 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/47aacf69-f6eb-4e85-9b70-8f241bfa812f-openstack-config-secret\") pod \"openstackclient\" (UID: \"47aacf69-f6eb-4e85-9b70-8f241bfa812f\") " pod="openstack/openstackclient"
Oct 11 05:08:55 crc kubenswrapper[4651]: I1011 05:08:55.933347 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/47aacf69-f6eb-4e85-9b70-8f241bfa812f-openstack-config\") pod \"openstackclient\" (UID: \"47aacf69-f6eb-4e85-9b70-8f241bfa812f\") " pod="openstack/openstackclient"
Oct 11 05:08:55 crc kubenswrapper[4651]: I1011 05:08:55.933429 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47aacf69-f6eb-4e85-9b70-8f241bfa812f-combined-ca-bundle\") pod \"openstackclient\" (UID: \"47aacf69-f6eb-4e85-9b70-8f241bfa812f\") " pod="openstack/openstackclient"
Oct 11 05:08:55 crc kubenswrapper[4651]: I1011 05:08:55.933498 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-znljt\" (UniqueName: \"kubernetes.io/projected/47aacf69-f6eb-4e85-9b70-8f241bfa812f-kube-api-access-znljt\") pod \"openstackclient\" (UID: \"47aacf69-f6eb-4e85-9b70-8f241bfa812f\") " pod="openstack/openstackclient"
Oct 11 05:08:56 crc kubenswrapper[4651]: I1011 05:08:56.026065 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Oct 11 05:08:56 crc kubenswrapper[4651]: I1011 05:08:56.027287 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9bee9214-1b63-4ef6-81ca-507ef630559b","Type":"ContainerStarted","Data":"da06fda549af6e4fb8d7344709c9bd0153f7b233704350756c2c0ff2d0007f20"}
Oct 11 05:08:56 crc kubenswrapper[4651]: I1011 05:08:56.034541 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/47aacf69-f6eb-4e85-9b70-8f241bfa812f-openstack-config\") pod \"openstackclient\" (UID: \"47aacf69-f6eb-4e85-9b70-8f241bfa812f\") " pod="openstack/openstackclient"
Oct 11 05:08:56 crc kubenswrapper[4651]: I1011 05:08:56.034592 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47aacf69-f6eb-4e85-9b70-8f241bfa812f-combined-ca-bundle\") pod \"openstackclient\" (UID: \"47aacf69-f6eb-4e85-9b70-8f241bfa812f\") " pod="openstack/openstackclient"
Oct 11 05:08:56 crc kubenswrapper[4651]: I1011 05:08:56.034630 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-znljt\" (UniqueName: \"kubernetes.io/projected/47aacf69-f6eb-4e85-9b70-8f241bfa812f-kube-api-access-znljt\") pod \"openstackclient\" (UID: \"47aacf69-f6eb-4e85-9b70-8f241bfa812f\") " pod="openstack/openstackclient"
Oct 11 05:08:56 crc kubenswrapper[4651]: I1011 05:08:56.034685 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/47aacf69-f6eb-4e85-9b70-8f241bfa812f-openstack-config-secret\") pod \"openstackclient\" (UID: \"47aacf69-f6eb-4e85-9b70-8f241bfa812f\") " pod="openstack/openstackclient"
Oct 11 05:08:56 crc kubenswrapper[4651]: I1011 05:08:56.035916 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/47aacf69-f6eb-4e85-9b70-8f241bfa812f-openstack-config\") pod \"openstackclient\" (UID: \"47aacf69-f6eb-4e85-9b70-8f241bfa812f\") " pod="openstack/openstackclient"
Oct 11 05:08:56 crc kubenswrapper[4651]: I1011 05:08:56.043040 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47aacf69-f6eb-4e85-9b70-8f241bfa812f-combined-ca-bundle\") pod \"openstackclient\" (UID: \"47aacf69-f6eb-4e85-9b70-8f241bfa812f\") " pod="openstack/openstackclient"
Oct 11 05:08:56 crc kubenswrapper[4651]: I1011 05:08:56.044143 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Oct 11 05:08:56 crc kubenswrapper[4651]: I1011 05:08:56.044393 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/47aacf69-f6eb-4e85-9b70-8f241bfa812f-openstack-config-secret\") pod \"openstackclient\" (UID: \"47aacf69-f6eb-4e85-9b70-8f241bfa812f\") " pod="openstack/openstackclient"
Oct 11 05:08:56 crc kubenswrapper[4651]: I1011 05:08:56.058970 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-znljt\" (UniqueName: \"kubernetes.io/projected/47aacf69-f6eb-4e85-9b70-8f241bfa812f-kube-api-access-znljt\") pod \"openstackclient\" (UID: \"47aacf69-f6eb-4e85-9b70-8f241bfa812f\") " pod="openstack/openstackclient"
Oct 11 05:08:56 crc kubenswrapper[4651]: I1011 05:08:56.060300 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.060285463 podStartE2EDuration="3.060285463s" podCreationTimestamp="2025-10-11 05:08:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:08:56.059291918 +0000 UTC m=+1056.955524734" watchObservedRunningTime="2025-10-11 05:08:56.060285463 +0000 UTC m=+1056.956518259"
Oct 11 05:08:56 crc kubenswrapper[4651]: I1011 05:08:56.065051 4651 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="5436303b-9926-42ee-82ba-e091535d3630" podUID="47aacf69-f6eb-4e85-9b70-8f241bfa812f"
Oct 11 05:08:56 crc kubenswrapper[4651]: I1011 05:08:56.135728 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5436303b-9926-42ee-82ba-e091535d3630-openstack-config\") pod \"5436303b-9926-42ee-82ba-e091535d3630\" (UID: \"5436303b-9926-42ee-82ba-e091535d3630\") "
Oct 11 05:08:56 crc kubenswrapper[4651]: I1011 05:08:56.135801 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5436303b-9926-42ee-82ba-e091535d3630-openstack-config-secret\") pod \"5436303b-9926-42ee-82ba-e091535d3630\" (UID: \"5436303b-9926-42ee-82ba-e091535d3630\") "
Oct 11 05:08:56 crc kubenswrapper[4651]: I1011 05:08:56.135842 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5436303b-9926-42ee-82ba-e091535d3630-combined-ca-bundle\") pod \"5436303b-9926-42ee-82ba-e091535d3630\" (UID: \"5436303b-9926-42ee-82ba-e091535d3630\") "
Oct 11 05:08:56 crc kubenswrapper[4651]: I1011 05:08:56.135945 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kx6gg\" (UniqueName: \"kubernetes.io/projected/5436303b-9926-42ee-82ba-e091535d3630-kube-api-access-kx6gg\") pod \"5436303b-9926-42ee-82ba-e091535d3630\" (UID: \"5436303b-9926-42ee-82ba-e091535d3630\") "
Oct 11 05:08:56 crc kubenswrapper[4651]: I1011 05:08:56.136220 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5436303b-9926-42ee-82ba-e091535d3630-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "5436303b-9926-42ee-82ba-e091535d3630" (UID: "5436303b-9926-42ee-82ba-e091535d3630"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:08:56 crc kubenswrapper[4651]: I1011 05:08:56.136561 4651 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5436303b-9926-42ee-82ba-e091535d3630-openstack-config\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:56 crc kubenswrapper[4651]: I1011 05:08:56.142945 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5436303b-9926-42ee-82ba-e091535d3630-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5436303b-9926-42ee-82ba-e091535d3630" (UID: "5436303b-9926-42ee-82ba-e091535d3630"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:08:56 crc kubenswrapper[4651]: I1011 05:08:56.144049 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5436303b-9926-42ee-82ba-e091535d3630-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "5436303b-9926-42ee-82ba-e091535d3630" (UID: "5436303b-9926-42ee-82ba-e091535d3630"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:08:56 crc kubenswrapper[4651]: I1011 05:08:56.144315 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5436303b-9926-42ee-82ba-e091535d3630-kube-api-access-kx6gg" (OuterVolumeSpecName: "kube-api-access-kx6gg") pod "5436303b-9926-42ee-82ba-e091535d3630" (UID: "5436303b-9926-42ee-82ba-e091535d3630"). InnerVolumeSpecName "kube-api-access-kx6gg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:08:56 crc kubenswrapper[4651]: I1011 05:08:56.194220 4651 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-68976f6bc6-9jl66" podUID="93e6a5fd-218d-44c2-9bf9-2796b1ff0c33" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.150:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.150:8443: connect: connection refused"
Oct 11 05:08:56 crc kubenswrapper[4651]: I1011 05:08:56.204962 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Oct 11 05:08:56 crc kubenswrapper[4651]: I1011 05:08:56.238613 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kx6gg\" (UniqueName: \"kubernetes.io/projected/5436303b-9926-42ee-82ba-e091535d3630-kube-api-access-kx6gg\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:56 crc kubenswrapper[4651]: I1011 05:08:56.239185 4651 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5436303b-9926-42ee-82ba-e091535d3630-openstack-config-secret\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:56 crc kubenswrapper[4651]: I1011 05:08:56.239256 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5436303b-9926-42ee-82ba-e091535d3630-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 11 05:08:56 crc kubenswrapper[4651]: I1011 05:08:56.674960 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Oct 11 05:08:56 crc kubenswrapper[4651]: W1011 05:08:56.678941 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod47aacf69_f6eb_4e85_9b70_8f241bfa812f.slice/crio-67bef98fcdfffb89474348f523dfda29a20b01c6ed306e795be0b44ef2981b83 WatchSource:0}: Error finding container 67bef98fcdfffb89474348f523dfda29a20b01c6ed306e795be0b44ef2981b83: Status 404 returned error can't find the container with id 67bef98fcdfffb89474348f523dfda29a20b01c6ed306e795be0b44ef2981b83
Oct 11 05:08:57 crc kubenswrapper[4651]: I1011 05:08:57.034766 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"47aacf69-f6eb-4e85-9b70-8f241bfa812f","Type":"ContainerStarted","Data":"67bef98fcdfffb89474348f523dfda29a20b01c6ed306e795be0b44ef2981b83"}
Oct 11 05:08:57 crc kubenswrapper[4651]: I1011 05:08:57.035053 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Oct 11 05:08:57 crc kubenswrapper[4651]: I1011 05:08:57.053268 4651 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="5436303b-9926-42ee-82ba-e091535d3630" podUID="47aacf69-f6eb-4e85-9b70-8f241bfa812f"
Oct 11 05:08:57 crc kubenswrapper[4651]: I1011 05:08:57.886737 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5436303b-9926-42ee-82ba-e091535d3630" path="/var/lib/kubelet/pods/5436303b-9926-42ee-82ba-e091535d3630/volumes"
Oct 11 05:08:58 crc kubenswrapper[4651]: I1011 05:08:58.385562 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0"
Oct 11 05:08:58 crc kubenswrapper[4651]: I1011 05:08:58.601126 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-695465f8bc-lbxbx"]
Oct 11 05:08:58 crc kubenswrapper[4651]: I1011 05:08:58.616449 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-695465f8bc-lbxbx"]
Oct 11 05:08:58 crc kubenswrapper[4651]: I1011 05:08:58.616571 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-695465f8bc-lbxbx"
Oct 11 05:08:58 crc kubenswrapper[4651]: I1011 05:08:58.619072 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc"
Oct 11 05:08:58 crc kubenswrapper[4651]: I1011 05:08:58.619338 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc"
Oct 11 05:08:58 crc kubenswrapper[4651]: I1011 05:08:58.619486 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data"
Oct 11 05:08:58 crc kubenswrapper[4651]: I1011 05:08:58.680129 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8259a36-62ec-4cdc-b377-3574bf0bead5-internal-tls-certs\") pod \"swift-proxy-695465f8bc-lbxbx\" (UID: \"e8259a36-62ec-4cdc-b377-3574bf0bead5\") " pod="openstack/swift-proxy-695465f8bc-lbxbx"
Oct 11 05:08:58 crc kubenswrapper[4651]: I1011 05:08:58.680185 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8259a36-62ec-4cdc-b377-3574bf0bead5-combined-ca-bundle\") pod \"swift-proxy-695465f8bc-lbxbx\" (UID: \"e8259a36-62ec-4cdc-b377-3574bf0bead5\") " pod="openstack/swift-proxy-695465f8bc-lbxbx"
Oct 11 05:08:58 crc kubenswrapper[4651]: I1011 05:08:58.680214 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkr55\" (UniqueName: \"kubernetes.io/projected/e8259a36-62ec-4cdc-b377-3574bf0bead5-kube-api-access-kkr55\") pod \"swift-proxy-695465f8bc-lbxbx\" (UID: \"e8259a36-62ec-4cdc-b377-3574bf0bead5\") " pod="openstack/swift-proxy-695465f8bc-lbxbx"
Oct 11 05:08:58 crc kubenswrapper[4651]: I1011 05:08:58.680252 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8259a36-62ec-4cdc-b377-3574bf0bead5-config-data\") pod \"swift-proxy-695465f8bc-lbxbx\" (UID: \"e8259a36-62ec-4cdc-b377-3574bf0bead5\") " pod="openstack/swift-proxy-695465f8bc-lbxbx"
Oct 11 05:08:58 crc kubenswrapper[4651]: I1011 05:08:58.680282 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e8259a36-62ec-4cdc-b377-3574bf0bead5-run-httpd\") pod \"swift-proxy-695465f8bc-lbxbx\" (UID: \"e8259a36-62ec-4cdc-b377-3574bf0bead5\") " pod="openstack/swift-proxy-695465f8bc-lbxbx"
Oct 11 05:08:58 crc kubenswrapper[4651]: I1011 05:08:58.680301 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8259a36-62ec-4cdc-b377-3574bf0bead5-public-tls-certs\") pod \"swift-proxy-695465f8bc-lbxbx\" (UID: \"e8259a36-62ec-4cdc-b377-3574bf0bead5\") " pod="openstack/swift-proxy-695465f8bc-lbxbx"
Oct 11 05:08:58 crc kubenswrapper[4651]: I1011 05:08:58.680352 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e8259a36-62ec-4cdc-b377-3574bf0bead5-log-httpd\") pod \"swift-proxy-695465f8bc-lbxbx\" (UID: \"e8259a36-62ec-4cdc-b377-3574bf0bead5\") " pod="openstack/swift-proxy-695465f8bc-lbxbx"
Oct 11 05:08:58 crc kubenswrapper[4651]: I1011 05:08:58.680368 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e8259a36-62ec-4cdc-b377-3574bf0bead5-etc-swift\") pod \"swift-proxy-695465f8bc-lbxbx\" (UID: \"e8259a36-62ec-4cdc-b377-3574bf0bead5\") " pod="openstack/swift-proxy-695465f8bc-lbxbx"
Oct 11 05:08:58 crc kubenswrapper[4651]: I1011 05:08:58.781950 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e8259a36-62ec-4cdc-b377-3574bf0bead5-log-httpd\") pod \"swift-proxy-695465f8bc-lbxbx\" (UID: \"e8259a36-62ec-4cdc-b377-3574bf0bead5\") " pod="openstack/swift-proxy-695465f8bc-lbxbx"
Oct 11 05:08:58 crc kubenswrapper[4651]: I1011 05:08:58.781988 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e8259a36-62ec-4cdc-b377-3574bf0bead5-etc-swift\") pod \"swift-proxy-695465f8bc-lbxbx\" (UID: \"e8259a36-62ec-4cdc-b377-3574bf0bead5\") " pod="openstack/swift-proxy-695465f8bc-lbxbx"
Oct 11 05:08:58 crc kubenswrapper[4651]: I1011 05:08:58.782058 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8259a36-62ec-4cdc-b377-3574bf0bead5-internal-tls-certs\") pod \"swift-proxy-695465f8bc-lbxbx\" (UID: \"e8259a36-62ec-4cdc-b377-3574bf0bead5\") " pod="openstack/swift-proxy-695465f8bc-lbxbx"
Oct 11 05:08:58 crc kubenswrapper[4651]: I1011 05:08:58.782092 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8259a36-62ec-4cdc-b377-3574bf0bead5-combined-ca-bundle\") pod \"swift-proxy-695465f8bc-lbxbx\" (UID: \"e8259a36-62ec-4cdc-b377-3574bf0bead5\") " pod="openstack/swift-proxy-695465f8bc-lbxbx"
Oct 11 05:08:58 crc kubenswrapper[4651]: I1011 05:08:58.782118 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkr55\" (UniqueName: \"kubernetes.io/projected/e8259a36-62ec-4cdc-b377-3574bf0bead5-kube-api-access-kkr55\") pod \"swift-proxy-695465f8bc-lbxbx\" (UID: \"e8259a36-62ec-4cdc-b377-3574bf0bead5\") " pod="openstack/swift-proxy-695465f8bc-lbxbx"
Oct 11 05:08:58 crc kubenswrapper[4651]: I1011 05:08:58.782153 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8259a36-62ec-4cdc-b377-3574bf0bead5-config-data\") pod \"swift-proxy-695465f8bc-lbxbx\" (UID: \"e8259a36-62ec-4cdc-b377-3574bf0bead5\") " pod="openstack/swift-proxy-695465f8bc-lbxbx"
Oct 11 05:08:58 crc kubenswrapper[4651]: I1011 05:08:58.782171 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e8259a36-62ec-4cdc-b377-3574bf0bead5-run-httpd\") pod \"swift-proxy-695465f8bc-lbxbx\" (UID: \"e8259a36-62ec-4cdc-b377-3574bf0bead5\") " pod="openstack/swift-proxy-695465f8bc-lbxbx"
Oct 11 05:08:58 crc kubenswrapper[4651]: I1011 05:08:58.782191 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8259a36-62ec-4cdc-b377-3574bf0bead5-public-tls-certs\") pod \"swift-proxy-695465f8bc-lbxbx\" (UID: \"e8259a36-62ec-4cdc-b377-3574bf0bead5\") " pod="openstack/swift-proxy-695465f8bc-lbxbx"
Oct 11 05:08:58 crc kubenswrapper[4651]: I1011 05:08:58.784430 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e8259a36-62ec-4cdc-b377-3574bf0bead5-run-httpd\") pod \"swift-proxy-695465f8bc-lbxbx\" (UID: \"e8259a36-62ec-4cdc-b377-3574bf0bead5\") " pod="openstack/swift-proxy-695465f8bc-lbxbx"
Oct 11 05:08:58 crc kubenswrapper[4651]: I1011 05:08:58.785983 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e8259a36-62ec-4cdc-b377-3574bf0bead5-log-httpd\") pod \"swift-proxy-695465f8bc-lbxbx\" (UID: \"e8259a36-62ec-4cdc-b377-3574bf0bead5\") " pod="openstack/swift-proxy-695465f8bc-lbxbx"
Oct 11 05:08:58 crc kubenswrapper[4651]: I1011 05:08:58.788015 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8259a36-62ec-4cdc-b377-3574bf0bead5-public-tls-certs\") pod \"swift-proxy-695465f8bc-lbxbx\" (UID: \"e8259a36-62ec-4cdc-b377-3574bf0bead5\") " pod="openstack/swift-proxy-695465f8bc-lbxbx"
Oct 11 05:08:58 crc kubenswrapper[4651]: I1011 05:08:58.789565 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8259a36-62ec-4cdc-b377-3574bf0bead5-combined-ca-bundle\") pod \"swift-proxy-695465f8bc-lbxbx\" (UID: \"e8259a36-62ec-4cdc-b377-3574bf0bead5\") " pod="openstack/swift-proxy-695465f8bc-lbxbx"
Oct 11 05:08:58 crc kubenswrapper[4651]: I1011 05:08:58.791263 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e8259a36-62ec-4cdc-b377-3574bf0bead5-etc-swift\") pod \"swift-proxy-695465f8bc-lbxbx\" (UID: \"e8259a36-62ec-4cdc-b377-3574bf0bead5\") " pod="openstack/swift-proxy-695465f8bc-lbxbx"
Oct 11 05:08:58 crc kubenswrapper[4651]: I1011 05:08:58.800571 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkr55\" (UniqueName: \"kubernetes.io/projected/e8259a36-62ec-4cdc-b377-3574bf0bead5-kube-api-access-kkr55\") pod \"swift-proxy-695465f8bc-lbxbx\" (UID: \"e8259a36-62ec-4cdc-b377-3574bf0bead5\") " pod="openstack/swift-proxy-695465f8bc-lbxbx"
Oct 11 05:08:58 crc kubenswrapper[4651]: I1011 05:08:58.801005 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8259a36-62ec-4cdc-b377-3574bf0bead5-config-data\") pod \"swift-proxy-695465f8bc-lbxbx\" (UID: \"e8259a36-62ec-4cdc-b377-3574bf0bead5\") " pod="openstack/swift-proxy-695465f8bc-lbxbx"
Oct 11 05:08:58 crc kubenswrapper[4651]: I1011 05:08:58.812451 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8259a36-62ec-4cdc-b377-3574bf0bead5-internal-tls-certs\") pod \"swift-proxy-695465f8bc-lbxbx\" (UID: \"e8259a36-62ec-4cdc-b377-3574bf0bead5\") " pod="openstack/swift-proxy-695465f8bc-lbxbx"
Oct 11 05:08:58 crc kubenswrapper[4651]: I1011 05:08:58.939277 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-695465f8bc-lbxbx"
Oct 11 05:08:59 crc kubenswrapper[4651]: I1011 05:08:59.317407 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 11 05:08:59 crc kubenswrapper[4651]: I1011 05:08:59.318545 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0c69b234-aa8e-440b-b730-b901ebe0a7b1" containerName="proxy-httpd" containerID="cri-o://d9c59e5c3243b58d6045dcae171ee06c82633ecf88e770bd30814b5938e504d2" gracePeriod=30
Oct 11 05:08:59 crc kubenswrapper[4651]: I1011 05:08:59.318681 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0c69b234-aa8e-440b-b730-b901ebe0a7b1" containerName="sg-core" containerID="cri-o://d851878bbb579eae5133c54b976d0f45d2db8027ecb879e9dfa2974147c2688f" gracePeriod=30
Oct 11 05:08:59 crc kubenswrapper[4651]: I1011 05:08:59.318749 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0c69b234-aa8e-440b-b730-b901ebe0a7b1" containerName="ceilometer-notification-agent" containerID="cri-o://87d142c83cbdde5c738e3a4deb5da1b53b953facf14e583ff63f3187908e6c4c" gracePeriod=30
Oct 11 05:08:59 crc kubenswrapper[4651]: I1011 05:08:59.318505 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0c69b234-aa8e-440b-b730-b901ebe0a7b1" containerName="ceilometer-central-agent" containerID="cri-o://3c0f85abdf531f0692e26e985d330571d01a76caaf6618b4aca879dd281d80f2" gracePeriod=30
Oct 11 05:08:59 crc kubenswrapper[4651]: I1011 05:08:59.324534 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Oct 11 05:08:59 crc kubenswrapper[4651]: I1011 05:08:59.375393 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-74cbfd888-nqwlq"
Oct 11 05:08:59 crc kubenswrapper[4651]: I1011 05:08:59.560624 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-695465f8bc-lbxbx"]
Oct 11 05:08:59 crc kubenswrapper[4651]: W1011 05:08:59.568195 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode8259a36_62ec_4cdc_b377_3574bf0bead5.slice/crio-5d514678425822d0ff13d9726a446884510fdfa7c81a4372ab1bc09b0d8e53f0 WatchSource:0}: Error finding container 5d514678425822d0ff13d9726a446884510fdfa7c81a4372ab1bc09b0d8e53f0: Status 404 returned error can't find the container with id 5d514678425822d0ff13d9726a446884510fdfa7c81a4372ab1bc09b0d8e53f0
Oct 11 05:09:00 crc kubenswrapper[4651]: I1011 05:09:00.117786 4651 generic.go:334] "Generic (PLEG): container finished" podID="0c69b234-aa8e-440b-b730-b901ebe0a7b1" containerID="d9c59e5c3243b58d6045dcae171ee06c82633ecf88e770bd30814b5938e504d2" exitCode=0
Oct 11 05:09:00 crc kubenswrapper[4651]: I1011 05:09:00.118067 4651 generic.go:334] "Generic (PLEG): container finished" podID="0c69b234-aa8e-440b-b730-b901ebe0a7b1" containerID="d851878bbb579eae5133c54b976d0f45d2db8027ecb879e9dfa2974147c2688f" exitCode=2
Oct 11 05:09:00 crc kubenswrapper[4651]: I1011 05:09:00.118076 4651 generic.go:334] "Generic (PLEG): container finished" podID="0c69b234-aa8e-440b-b730-b901ebe0a7b1" containerID="3c0f85abdf531f0692e26e985d330571d01a76caaf6618b4aca879dd281d80f2" exitCode=0
Oct 11 05:09:00 crc kubenswrapper[4651]: I1011 05:09:00.118135 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0c69b234-aa8e-440b-b730-b901ebe0a7b1","Type":"ContainerDied","Data":"d9c59e5c3243b58d6045dcae171ee06c82633ecf88e770bd30814b5938e504d2"}
Oct 11 05:09:00 crc kubenswrapper[4651]: I1011 05:09:00.118164 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0c69b234-aa8e-440b-b730-b901ebe0a7b1","Type":"ContainerDied","Data":"d851878bbb579eae5133c54b976d0f45d2db8027ecb879e9dfa2974147c2688f"}
Oct 11 05:09:00 crc kubenswrapper[4651]: I1011 05:09:00.118174 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0c69b234-aa8e-440b-b730-b901ebe0a7b1","Type":"ContainerDied","Data":"3c0f85abdf531f0692e26e985d330571d01a76caaf6618b4aca879dd281d80f2"}
Oct 11 05:09:00 crc kubenswrapper[4651]: I1011 05:09:00.122832 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-695465f8bc-lbxbx" event={"ID":"e8259a36-62ec-4cdc-b377-3574bf0bead5","Type":"ContainerStarted","Data":"c473027c07c94d7c9b71b89c239d331ef4fe2b611c21ef9a6160c4bb37b8eaa0"}
Oct 11 05:09:00 crc kubenswrapper[4651]: I1011 05:09:00.122860 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-695465f8bc-lbxbx" event={"ID":"e8259a36-62ec-4cdc-b377-3574bf0bead5","Type":"ContainerStarted","Data":"7f1bd87faa74f5e5cfb60803c1f013f58403a85b85547eaa3cb7e7c4d6e25f08"}
Oct 11 05:09:00 crc kubenswrapper[4651]: I1011 05:09:00.122870 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-695465f8bc-lbxbx" event={"ID":"e8259a36-62ec-4cdc-b377-3574bf0bead5","Type":"ContainerStarted","Data":"5d514678425822d0ff13d9726a446884510fdfa7c81a4372ab1bc09b0d8e53f0"}
Oct 11 05:09:00 crc kubenswrapper[4651]: I1011 05:09:00.123780 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-695465f8bc-lbxbx"
Oct 11 05:09:00 crc kubenswrapper[4651]: I1011 05:09:00.123879 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-695465f8bc-lbxbx"
Oct 11 05:09:00 crc kubenswrapper[4651]: I1011 05:09:00.149966 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-695465f8bc-lbxbx" podStartSLOduration=2.1499471 podStartE2EDuration="2.1499471s" podCreationTimestamp="2025-10-11 05:08:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:09:00.144106061 +0000 UTC m=+1061.040338857" watchObservedRunningTime="2025-10-11 05:09:00.1499471 +0000 UTC m=+1061.046179896"
Oct 11 05:09:00 crc kubenswrapper[4651]: I1011 05:09:00.362759 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-74cbfd888-nqwlq"
Oct 11 05:09:02 crc kubenswrapper[4651]: I1011 05:09:02.153507 4651 generic.go:334] "Generic (PLEG): container finished" podID="0c69b234-aa8e-440b-b730-b901ebe0a7b1" containerID="87d142c83cbdde5c738e3a4deb5da1b53b953facf14e583ff63f3187908e6c4c" exitCode=0
Oct 11 05:09:02 crc kubenswrapper[4651]: I1011 05:09:02.153605 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0c69b234-aa8e-440b-b730-b901ebe0a7b1","Type":"ContainerDied","Data":"87d142c83cbdde5c738e3a4deb5da1b53b953facf14e583ff63f3187908e6c4c"}
Oct 11 05:09:02 crc kubenswrapper[4651]: I1011 05:09:02.927349 4651 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="0c69b234-aa8e-440b-b730-b901ebe0a7b1" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.165:3000/\": dial tcp 10.217.0.165:3000: connect: connection refused"
Oct 11 05:09:03 crc kubenswrapper[4651]: I1011 05:09:03.589187 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0"
Oct 11 05:09:04 crc kubenswrapper[4651]: I1011 05:09:04.879562 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Oct 11 05:09:04 crc kubenswrapper[4651]: I1011 05:09:04.879762 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="24cb59c7-7c18-42ef-9e4e-a9cad024bf49" containerName="kube-state-metrics" containerID="cri-o://0f70749cf13488323f896ca1de8b2914c159624773deed1b202bfe91c06a1ab8" gracePeriod=30
Oct 11 05:09:05 crc kubenswrapper[4651]: I1011 05:09:05.183394 4651 generic.go:334] "Generic (PLEG): container finished" podID="24cb59c7-7c18-42ef-9e4e-a9cad024bf49" containerID="0f70749cf13488323f896ca1de8b2914c159624773deed1b202bfe91c06a1ab8" exitCode=2
Oct 11 05:09:05 crc kubenswrapper[4651]: I1011 05:09:05.183483 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"24cb59c7-7c18-42ef-9e4e-a9cad024bf49","Type":"ContainerDied","Data":"0f70749cf13488323f896ca1de8b2914c159624773deed1b202bfe91c06a1ab8"}
Oct 11 05:09:06 crc kubenswrapper[4651]: I1011 05:09:06.194161 4651 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-68976f6bc6-9jl66" podUID="93e6a5fd-218d-44c2-9bf9-2796b1ff0c33" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.150:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.150:8443: connect: connection refused"
Oct 11 05:09:06 crc kubenswrapper[4651]: I1011 05:09:06.194352 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-68976f6bc6-9jl66"
Oct 11 05:09:06 crc kubenswrapper[4651]: I1011 05:09:06.229899 4651 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/kube-state-metrics-0" podUID="24cb59c7-7c18-42ef-9e4e-a9cad024bf49" containerName="kube-state-metrics" probeResult="failure" output="Get \"http://10.217.0.105:8081/readyz\": dial tcp 10.217.0.105:8081: connect: connection refused"
Oct 11 05:09:06 crc kubenswrapper[4651]: I1011 05:09:06.585235 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Oct 11 05:09:06 crc kubenswrapper[4651]: I1011 05:09:06.633213 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h8h7t\" (UniqueName: \"kubernetes.io/projected/24cb59c7-7c18-42ef-9e4e-a9cad024bf49-kube-api-access-h8h7t\") pod \"24cb59c7-7c18-42ef-9e4e-a9cad024bf49\" (UID: \"24cb59c7-7c18-42ef-9e4e-a9cad024bf49\") "
Oct 11 05:09:06 crc kubenswrapper[4651]: I1011 05:09:06.637618 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24cb59c7-7c18-42ef-9e4e-a9cad024bf49-kube-api-access-h8h7t" (OuterVolumeSpecName: "kube-api-access-h8h7t") pod "24cb59c7-7c18-42ef-9e4e-a9cad024bf49" (UID: "24cb59c7-7c18-42ef-9e4e-a9cad024bf49"). InnerVolumeSpecName "kube-api-access-h8h7t". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:09:06 crc kubenswrapper[4651]: I1011 05:09:06.646030 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 11 05:09:06 crc kubenswrapper[4651]: I1011 05:09:06.735034 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0c69b234-aa8e-440b-b730-b901ebe0a7b1-log-httpd\") pod \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\" (UID: \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\") "
Oct 11 05:09:06 crc kubenswrapper[4651]: I1011 05:09:06.735156 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c69b234-aa8e-440b-b730-b901ebe0a7b1-combined-ca-bundle\") pod \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\" (UID: \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\") "
Oct 11 05:09:06 crc kubenswrapper[4651]: I1011 05:09:06.735325 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0c69b234-aa8e-440b-b730-b901ebe0a7b1-scripts\") pod \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\" (UID: \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\") "
Oct 11 05:09:06 crc kubenswrapper[4651]: I1011 05:09:06.735359 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c69b234-aa8e-440b-b730-b901ebe0a7b1-config-data\") pod \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\" (UID: \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\") "
Oct 11 05:09:06 crc kubenswrapper[4651]: I1011 05:09:06.735386 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0c69b234-aa8e-440b-b730-b901ebe0a7b1-run-httpd\") pod \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\" (UID: \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\") "
Oct 11 05:09:06 crc kubenswrapper[4651]: I1011 05:09:06.735495 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0c69b234-aa8e-440b-b730-b901ebe0a7b1-sg-core-conf-yaml\") pod \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\" (UID: \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\") "
Oct 11 05:09:06 crc kubenswrapper[4651]: I1011 05:09:06.735519 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5j2d4\" (UniqueName: \"kubernetes.io/projected/0c69b234-aa8e-440b-b730-b901ebe0a7b1-kube-api-access-5j2d4\") pod \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\" (UID: \"0c69b234-aa8e-440b-b730-b901ebe0a7b1\") "
Oct 11 05:09:06 crc kubenswrapper[4651]: I1011 05:09:06.735680 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0c69b234-aa8e-440b-b730-b901ebe0a7b1-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "0c69b234-aa8e-440b-b730-b901ebe0a7b1" (UID: "0c69b234-aa8e-440b-b730-b901ebe0a7b1"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 05:09:06 crc kubenswrapper[4651]: I1011 05:09:06.736018 4651 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0c69b234-aa8e-440b-b730-b901ebe0a7b1-log-httpd\") on node \"crc\" DevicePath \"\""
Oct 11 05:09:06 crc kubenswrapper[4651]: I1011 05:09:06.736042 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h8h7t\" (UniqueName: \"kubernetes.io/projected/24cb59c7-7c18-42ef-9e4e-a9cad024bf49-kube-api-access-h8h7t\") on node \"crc\" DevicePath \"\""
Oct 11 05:09:06 crc kubenswrapper[4651]: I1011 05:09:06.736303 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0c69b234-aa8e-440b-b730-b901ebe0a7b1-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "0c69b234-aa8e-440b-b730-b901ebe0a7b1" (UID: "0c69b234-aa8e-440b-b730-b901ebe0a7b1"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 05:09:06 crc kubenswrapper[4651]: I1011 05:09:06.739649 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c69b234-aa8e-440b-b730-b901ebe0a7b1-scripts" (OuterVolumeSpecName: "scripts") pod "0c69b234-aa8e-440b-b730-b901ebe0a7b1" (UID: "0c69b234-aa8e-440b-b730-b901ebe0a7b1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:09:06 crc kubenswrapper[4651]: I1011 05:09:06.742914 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c69b234-aa8e-440b-b730-b901ebe0a7b1-kube-api-access-5j2d4" (OuterVolumeSpecName: "kube-api-access-5j2d4") pod "0c69b234-aa8e-440b-b730-b901ebe0a7b1" (UID: "0c69b234-aa8e-440b-b730-b901ebe0a7b1"). InnerVolumeSpecName "kube-api-access-5j2d4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:09:06 crc kubenswrapper[4651]: I1011 05:09:06.769776 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c69b234-aa8e-440b-b730-b901ebe0a7b1-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "0c69b234-aa8e-440b-b730-b901ebe0a7b1" (UID: "0c69b234-aa8e-440b-b730-b901ebe0a7b1"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:09:06 crc kubenswrapper[4651]: I1011 05:09:06.819751 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c69b234-aa8e-440b-b730-b901ebe0a7b1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0c69b234-aa8e-440b-b730-b901ebe0a7b1" (UID: "0c69b234-aa8e-440b-b730-b901ebe0a7b1"). InnerVolumeSpecName "combined-ca-bundle".
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:09:06 crc kubenswrapper[4651]: I1011 05:09:06.837566 4651 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0c69b234-aa8e-440b-b730-b901ebe0a7b1-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:06 crc kubenswrapper[4651]: I1011 05:09:06.837937 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5j2d4\" (UniqueName: \"kubernetes.io/projected/0c69b234-aa8e-440b-b730-b901ebe0a7b1-kube-api-access-5j2d4\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:06 crc kubenswrapper[4651]: I1011 05:09:06.838055 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c69b234-aa8e-440b-b730-b901ebe0a7b1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:06 crc kubenswrapper[4651]: I1011 05:09:06.838136 4651 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0c69b234-aa8e-440b-b730-b901ebe0a7b1-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:06 crc kubenswrapper[4651]: I1011 05:09:06.838213 4651 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0c69b234-aa8e-440b-b730-b901ebe0a7b1-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:06 crc kubenswrapper[4651]: I1011 05:09:06.866752 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c69b234-aa8e-440b-b730-b901ebe0a7b1-config-data" (OuterVolumeSpecName: "config-data") pod "0c69b234-aa8e-440b-b730-b901ebe0a7b1" (UID: "0c69b234-aa8e-440b-b730-b901ebe0a7b1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:09:06 crc kubenswrapper[4651]: I1011 05:09:06.943532 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c69b234-aa8e-440b-b730-b901ebe0a7b1-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.209838 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"47aacf69-f6eb-4e85-9b70-8f241bfa812f","Type":"ContainerStarted","Data":"822df1aa79b48cd38a43ddb3e8086e290a2701f7fc076c5d54fefe735ec4cc43"} Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.212955 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0c69b234-aa8e-440b-b730-b901ebe0a7b1","Type":"ContainerDied","Data":"c742b51a7c237adb031758a21c14952de2fb86827e756eb22a9051967c5fd82e"} Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.212999 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.213032 4651 scope.go:117] "RemoveContainer" containerID="d9c59e5c3243b58d6045dcae171ee06c82633ecf88e770bd30814b5938e504d2" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.216427 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"24cb59c7-7c18-42ef-9e4e-a9cad024bf49","Type":"ContainerDied","Data":"699b0cee8260d752b55acff090e30914e841d78d619df3b21035971f1d55fe12"} Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.216524 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.230349 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.586319205 podStartE2EDuration="12.230334589s" podCreationTimestamp="2025-10-11 05:08:55 +0000 UTC" firstStartedPulling="2025-10-11 05:08:56.680919096 +0000 UTC m=+1057.577151892" lastFinishedPulling="2025-10-11 05:09:06.32493446 +0000 UTC m=+1067.221167276" observedRunningTime="2025-10-11 05:09:07.228903382 +0000 UTC m=+1068.125136208" watchObservedRunningTime="2025-10-11 05:09:07.230334589 +0000 UTC m=+1068.126567375" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.260888 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.270587 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.280351 4651 scope.go:117] "RemoveContainer" containerID="d851878bbb579eae5133c54b976d0f45d2db8027ecb879e9dfa2974147c2688f" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.298564 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 11 05:09:07 crc kubenswrapper[4651]: E1011 05:09:07.299268 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c69b234-aa8e-440b-b730-b901ebe0a7b1" containerName="proxy-httpd" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.299290 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c69b234-aa8e-440b-b730-b901ebe0a7b1" containerName="proxy-httpd" Oct 11 05:09:07 crc kubenswrapper[4651]: E1011 05:09:07.299311 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24cb59c7-7c18-42ef-9e4e-a9cad024bf49" containerName="kube-state-metrics" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.299317 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="24cb59c7-7c18-42ef-9e4e-a9cad024bf49" containerName="kube-state-metrics" Oct 11 05:09:07 crc kubenswrapper[4651]: E1011 05:09:07.299338 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c69b234-aa8e-440b-b730-b901ebe0a7b1" containerName="sg-core" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.299346 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c69b234-aa8e-440b-b730-b901ebe0a7b1" containerName="sg-core" Oct 11 05:09:07 crc kubenswrapper[4651]: E1011 05:09:07.299358 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c69b234-aa8e-440b-b730-b901ebe0a7b1" containerName="ceilometer-central-agent" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.299364 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c69b234-aa8e-440b-b730-b901ebe0a7b1" containerName="ceilometer-central-agent" Oct 11 05:09:07 crc kubenswrapper[4651]: E1011 05:09:07.299378 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c69b234-aa8e-440b-b730-b901ebe0a7b1" containerName="ceilometer-notification-agent" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.299384 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c69b234-aa8e-440b-b730-b901ebe0a7b1" containerName="ceilometer-notification-agent" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.299546 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c69b234-aa8e-440b-b730-b901ebe0a7b1" containerName="proxy-httpd" Oct 11 05:09:07 crc 
kubenswrapper[4651]: I1011 05:09:07.299562 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c69b234-aa8e-440b-b730-b901ebe0a7b1" containerName="ceilometer-central-agent" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.299572 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c69b234-aa8e-440b-b730-b901ebe0a7b1" containerName="sg-core" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.299585 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="24cb59c7-7c18-42ef-9e4e-a9cad024bf49" containerName="kube-state-metrics" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.299600 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c69b234-aa8e-440b-b730-b901ebe0a7b1" containerName="ceilometer-notification-agent" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.300229 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.303765 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.304021 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.304212 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-bspfg" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.305980 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.344599 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.344939 4651 scope.go:117] "RemoveContainer" containerID="87d142c83cbdde5c738e3a4deb5da1b53b953facf14e583ff63f3187908e6c4c" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.349860 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fbfad70-f21a-4362-9b53-c955b9cca958-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"4fbfad70-f21a-4362-9b53-c955b9cca958\") " pod="openstack/kube-state-metrics-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.349931 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/4fbfad70-f21a-4362-9b53-c955b9cca958-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"4fbfad70-f21a-4362-9b53-c955b9cca958\") " pod="openstack/kube-state-metrics-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.349979 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fbfad70-f21a-4362-9b53-c955b9cca958-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"4fbfad70-f21a-4362-9b53-c955b9cca958\") " pod="openstack/kube-state-metrics-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.349994 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dktqb\" (UniqueName: \"kubernetes.io/projected/4fbfad70-f21a-4362-9b53-c955b9cca958-kube-api-access-dktqb\") pod \"kube-state-metrics-0\" (UID: 
\"4fbfad70-f21a-4362-9b53-c955b9cca958\") " pod="openstack/kube-state-metrics-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.361484 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.390231 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.400594 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.401605 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.403174 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.404473 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.404505 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.410050 4651 scope.go:117] "RemoveContainer" containerID="3c0f85abdf531f0692e26e985d330571d01a76caaf6618b4aca879dd281d80f2" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.430270 4651 scope.go:117] "RemoveContainer" containerID="0f70749cf13488323f896ca1de8b2914c159624773deed1b202bfe91c06a1ab8" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.459942 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzmht\" (UniqueName: \"kubernetes.io/projected/e85344ea-aeff-4826-bcd0-a4ea659a592c-kube-api-access-pzmht\") pod \"ceilometer-0\" (UID: \"e85344ea-aeff-4826-bcd0-a4ea659a592c\") " pod="openstack/ceilometer-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.459983 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e85344ea-aeff-4826-bcd0-a4ea659a592c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e85344ea-aeff-4826-bcd0-a4ea659a592c\") " pod="openstack/ceilometer-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.460066 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fbfad70-f21a-4362-9b53-c955b9cca958-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"4fbfad70-f21a-4362-9b53-c955b9cca958\") " pod="openstack/kube-state-metrics-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.460124 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e85344ea-aeff-4826-bcd0-a4ea659a592c-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"e85344ea-aeff-4826-bcd0-a4ea659a592c\") " pod="openstack/ceilometer-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.460166 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/4fbfad70-f21a-4362-9b53-c955b9cca958-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"4fbfad70-f21a-4362-9b53-c955b9cca958\") " pod="openstack/kube-state-metrics-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 
05:09:07.460189 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e85344ea-aeff-4826-bcd0-a4ea659a592c-scripts\") pod \"ceilometer-0\" (UID: \"e85344ea-aeff-4826-bcd0-a4ea659a592c\") " pod="openstack/ceilometer-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.460210 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e85344ea-aeff-4826-bcd0-a4ea659a592c-config-data\") pod \"ceilometer-0\" (UID: \"e85344ea-aeff-4826-bcd0-a4ea659a592c\") " pod="openstack/ceilometer-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.460263 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fbfad70-f21a-4362-9b53-c955b9cca958-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"4fbfad70-f21a-4362-9b53-c955b9cca958\") " pod="openstack/kube-state-metrics-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.460286 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dktqb\" (UniqueName: \"kubernetes.io/projected/4fbfad70-f21a-4362-9b53-c955b9cca958-kube-api-access-dktqb\") pod \"kube-state-metrics-0\" (UID: \"4fbfad70-f21a-4362-9b53-c955b9cca958\") " pod="openstack/kube-state-metrics-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.462471 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e85344ea-aeff-4826-bcd0-a4ea659a592c-log-httpd\") pod \"ceilometer-0\" (UID: \"e85344ea-aeff-4826-bcd0-a4ea659a592c\") " pod="openstack/ceilometer-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.462792 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e85344ea-aeff-4826-bcd0-a4ea659a592c-run-httpd\") pod \"ceilometer-0\" (UID: \"e85344ea-aeff-4826-bcd0-a4ea659a592c\") " pod="openstack/ceilometer-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.462872 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e85344ea-aeff-4826-bcd0-a4ea659a592c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e85344ea-aeff-4826-bcd0-a4ea659a592c\") " pod="openstack/ceilometer-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.464488 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fbfad70-f21a-4362-9b53-c955b9cca958-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"4fbfad70-f21a-4362-9b53-c955b9cca958\") " pod="openstack/kube-state-metrics-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.465132 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fbfad70-f21a-4362-9b53-c955b9cca958-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"4fbfad70-f21a-4362-9b53-c955b9cca958\") " pod="openstack/kube-state-metrics-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.465613 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: 
\"kubernetes.io/secret/4fbfad70-f21a-4362-9b53-c955b9cca958-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"4fbfad70-f21a-4362-9b53-c955b9cca958\") " pod="openstack/kube-state-metrics-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.484427 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dktqb\" (UniqueName: \"kubernetes.io/projected/4fbfad70-f21a-4362-9b53-c955b9cca958-kube-api-access-dktqb\") pod \"kube-state-metrics-0\" (UID: \"4fbfad70-f21a-4362-9b53-c955b9cca958\") " pod="openstack/kube-state-metrics-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.564562 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e85344ea-aeff-4826-bcd0-a4ea659a592c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e85344ea-aeff-4826-bcd0-a4ea659a592c\") " pod="openstack/ceilometer-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.564672 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzmht\" (UniqueName: \"kubernetes.io/projected/e85344ea-aeff-4826-bcd0-a4ea659a592c-kube-api-access-pzmht\") pod \"ceilometer-0\" (UID: \"e85344ea-aeff-4826-bcd0-a4ea659a592c\") " pod="openstack/ceilometer-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.564698 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e85344ea-aeff-4826-bcd0-a4ea659a592c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e85344ea-aeff-4826-bcd0-a4ea659a592c\") " pod="openstack/ceilometer-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.564901 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e85344ea-aeff-4826-bcd0-a4ea659a592c-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"e85344ea-aeff-4826-bcd0-a4ea659a592c\") " pod="openstack/ceilometer-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.564954 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e85344ea-aeff-4826-bcd0-a4ea659a592c-scripts\") pod \"ceilometer-0\" (UID: \"e85344ea-aeff-4826-bcd0-a4ea659a592c\") " pod="openstack/ceilometer-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.564977 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e85344ea-aeff-4826-bcd0-a4ea659a592c-config-data\") pod \"ceilometer-0\" (UID: \"e85344ea-aeff-4826-bcd0-a4ea659a592c\") " pod="openstack/ceilometer-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.565079 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e85344ea-aeff-4826-bcd0-a4ea659a592c-run-httpd\") pod \"ceilometer-0\" (UID: \"e85344ea-aeff-4826-bcd0-a4ea659a592c\") " pod="openstack/ceilometer-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.565098 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e85344ea-aeff-4826-bcd0-a4ea659a592c-log-httpd\") pod \"ceilometer-0\" (UID: \"e85344ea-aeff-4826-bcd0-a4ea659a592c\") " pod="openstack/ceilometer-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.565619 4651 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e85344ea-aeff-4826-bcd0-a4ea659a592c-log-httpd\") pod \"ceilometer-0\" (UID: \"e85344ea-aeff-4826-bcd0-a4ea659a592c\") " pod="openstack/ceilometer-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.565893 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e85344ea-aeff-4826-bcd0-a4ea659a592c-run-httpd\") pod \"ceilometer-0\" (UID: \"e85344ea-aeff-4826-bcd0-a4ea659a592c\") " pod="openstack/ceilometer-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.570509 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e85344ea-aeff-4826-bcd0-a4ea659a592c-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"e85344ea-aeff-4826-bcd0-a4ea659a592c\") " pod="openstack/ceilometer-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.570627 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e85344ea-aeff-4826-bcd0-a4ea659a592c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e85344ea-aeff-4826-bcd0-a4ea659a592c\") " pod="openstack/ceilometer-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.570660 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e85344ea-aeff-4826-bcd0-a4ea659a592c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e85344ea-aeff-4826-bcd0-a4ea659a592c\") " pod="openstack/ceilometer-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.570691 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e85344ea-aeff-4826-bcd0-a4ea659a592c-config-data\") pod \"ceilometer-0\" (UID: \"e85344ea-aeff-4826-bcd0-a4ea659a592c\") " pod="openstack/ceilometer-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.571423 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e85344ea-aeff-4826-bcd0-a4ea659a592c-scripts\") pod \"ceilometer-0\" (UID: \"e85344ea-aeff-4826-bcd0-a4ea659a592c\") " pod="openstack/ceilometer-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.584669 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzmht\" (UniqueName: \"kubernetes.io/projected/e85344ea-aeff-4826-bcd0-a4ea659a592c-kube-api-access-pzmht\") pod \"ceilometer-0\" (UID: \"e85344ea-aeff-4826-bcd0-a4ea659a592c\") " pod="openstack/ceilometer-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.665439 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.718626 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.884566 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0c69b234-aa8e-440b-b730-b901ebe0a7b1" path="/var/lib/kubelet/pods/0c69b234-aa8e-440b-b730-b901ebe0a7b1/volumes" Oct 11 05:09:07 crc kubenswrapper[4651]: I1011 05:09:07.886047 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="24cb59c7-7c18-42ef-9e4e-a9cad024bf49" path="/var/lib/kubelet/pods/24cb59c7-7c18-42ef-9e4e-a9cad024bf49/volumes" Oct 11 05:09:08 crc kubenswrapper[4651]: I1011 05:09:08.114938 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 11 05:09:08 crc kubenswrapper[4651]: I1011 05:09:08.195105 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 11 05:09:08 crc kubenswrapper[4651]: I1011 05:09:08.229799 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e85344ea-aeff-4826-bcd0-a4ea659a592c","Type":"ContainerStarted","Data":"b70fead4ab177ec3596918380087614dcad4b1b0a5962362be9c155ae2409e32"} Oct 11 05:09:08 crc kubenswrapper[4651]: I1011 05:09:08.232267 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"4fbfad70-f21a-4362-9b53-c955b9cca958","Type":"ContainerStarted","Data":"c6903f661ee59673ff41a458c608dcd1f94809b7d33dd1b44f280c9eb3e63e79"} Oct 11 05:09:08 crc kubenswrapper[4651]: I1011 05:09:08.409945 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 11 05:09:08 crc kubenswrapper[4651]: I1011 05:09:08.944977 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-695465f8bc-lbxbx" Oct 11 05:09:08 crc kubenswrapper[4651]: I1011 05:09:08.946088 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-695465f8bc-lbxbx" Oct 11 05:09:09 crc kubenswrapper[4651]: I1011 05:09:09.243687 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e85344ea-aeff-4826-bcd0-a4ea659a592c","Type":"ContainerStarted","Data":"c1f0e810c70514e3326d0ce0ca8ce2e3ab173b430557087c1005f87d442e022c"} Oct 11 05:09:09 crc kubenswrapper[4651]: I1011 05:09:09.246233 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"4fbfad70-f21a-4362-9b53-c955b9cca958","Type":"ContainerStarted","Data":"02802094ec511392eb3392fb4fb06978dbcf45d2a6be16338b46b85875fabf0a"} Oct 11 05:09:09 crc kubenswrapper[4651]: I1011 05:09:09.266544 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.925123444 podStartE2EDuration="2.266528662s" podCreationTimestamp="2025-10-11 05:09:07 +0000 UTC" firstStartedPulling="2025-10-11 05:09:08.119031022 +0000 UTC m=+1069.015263828" lastFinishedPulling="2025-10-11 05:09:08.46043625 +0000 UTC m=+1069.356669046" observedRunningTime="2025-10-11 05:09:09.261555225 +0000 UTC m=+1070.157788031" watchObservedRunningTime="2025-10-11 05:09:09.266528662 +0000 UTC m=+1070.162761458" Oct 11 05:09:10 crc kubenswrapper[4651]: I1011 05:09:10.268034 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e85344ea-aeff-4826-bcd0-a4ea659a592c","Type":"ContainerStarted","Data":"925e800403fbb9cbdc3a63317b61d075ff4f00baa4f06d486513f69dac15521a"} Oct 11 05:09:10 crc kubenswrapper[4651]: I1011 
05:09:10.268625 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e85344ea-aeff-4826-bcd0-a4ea659a592c","Type":"ContainerStarted","Data":"fb264949f0255cb352656cf94c7608050521ad3b67f420f1425549dc8ef8b1bf"} Oct 11 05:09:10 crc kubenswrapper[4651]: I1011 05:09:10.268651 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 11 05:09:12 crc kubenswrapper[4651]: I1011 05:09:12.303144 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e85344ea-aeff-4826-bcd0-a4ea659a592c","Type":"ContainerStarted","Data":"aa2590bee7fb46cda2d6db3578c059b4dd24d7432aac82d817e38fe23cec4afe"} Oct 11 05:09:12 crc kubenswrapper[4651]: I1011 05:09:12.303483 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 11 05:09:12 crc kubenswrapper[4651]: I1011 05:09:12.303357 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e85344ea-aeff-4826-bcd0-a4ea659a592c" containerName="ceilometer-notification-agent" containerID="cri-o://fb264949f0255cb352656cf94c7608050521ad3b67f420f1425549dc8ef8b1bf" gracePeriod=30 Oct 11 05:09:12 crc kubenswrapper[4651]: I1011 05:09:12.303266 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e85344ea-aeff-4826-bcd0-a4ea659a592c" containerName="ceilometer-central-agent" containerID="cri-o://c1f0e810c70514e3326d0ce0ca8ce2e3ab173b430557087c1005f87d442e022c" gracePeriod=30 Oct 11 05:09:12 crc kubenswrapper[4651]: I1011 05:09:12.303353 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e85344ea-aeff-4826-bcd0-a4ea659a592c" containerName="proxy-httpd" containerID="cri-o://aa2590bee7fb46cda2d6db3578c059b4dd24d7432aac82d817e38fe23cec4afe" gracePeriod=30 Oct 11 05:09:12 crc kubenswrapper[4651]: I1011 05:09:12.303395 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e85344ea-aeff-4826-bcd0-a4ea659a592c" containerName="sg-core" containerID="cri-o://925e800403fbb9cbdc3a63317b61d075ff4f00baa4f06d486513f69dac15521a" gracePeriod=30 Oct 11 05:09:12 crc kubenswrapper[4651]: I1011 05:09:12.314830 4651 generic.go:334] "Generic (PLEG): container finished" podID="93e6a5fd-218d-44c2-9bf9-2796b1ff0c33" containerID="321c90e912a2b24f9c5662f28a92c8cc11f803ff3e2b862071cbf01111dbe4be" exitCode=137 Oct 11 05:09:12 crc kubenswrapper[4651]: I1011 05:09:12.314866 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-68976f6bc6-9jl66" event={"ID":"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33","Type":"ContainerDied","Data":"321c90e912a2b24f9c5662f28a92c8cc11f803ff3e2b862071cbf01111dbe4be"} Oct 11 05:09:12 crc kubenswrapper[4651]: I1011 05:09:12.336533 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.308150419 podStartE2EDuration="5.336516611s" podCreationTimestamp="2025-10-11 05:09:07 +0000 UTC" firstStartedPulling="2025-10-11 05:09:08.195638481 +0000 UTC m=+1069.091871277" lastFinishedPulling="2025-10-11 05:09:11.224004683 +0000 UTC m=+1072.120237469" observedRunningTime="2025-10-11 05:09:12.33527773 +0000 UTC m=+1073.231510536" watchObservedRunningTime="2025-10-11 05:09:12.336516611 +0000 UTC m=+1073.232749407" Oct 11 05:09:13 crc kubenswrapper[4651]: I1011 05:09:13.324646 4651 generic.go:334] "Generic 
(PLEG): container finished" podID="e85344ea-aeff-4826-bcd0-a4ea659a592c" containerID="aa2590bee7fb46cda2d6db3578c059b4dd24d7432aac82d817e38fe23cec4afe" exitCode=0 Oct 11 05:09:13 crc kubenswrapper[4651]: I1011 05:09:13.324990 4651 generic.go:334] "Generic (PLEG): container finished" podID="e85344ea-aeff-4826-bcd0-a4ea659a592c" containerID="925e800403fbb9cbdc3a63317b61d075ff4f00baa4f06d486513f69dac15521a" exitCode=2 Oct 11 05:09:13 crc kubenswrapper[4651]: I1011 05:09:13.324999 4651 generic.go:334] "Generic (PLEG): container finished" podID="e85344ea-aeff-4826-bcd0-a4ea659a592c" containerID="fb264949f0255cb352656cf94c7608050521ad3b67f420f1425549dc8ef8b1bf" exitCode=0 Oct 11 05:09:13 crc kubenswrapper[4651]: I1011 05:09:13.325018 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e85344ea-aeff-4826-bcd0-a4ea659a592c","Type":"ContainerDied","Data":"aa2590bee7fb46cda2d6db3578c059b4dd24d7432aac82d817e38fe23cec4afe"} Oct 11 05:09:13 crc kubenswrapper[4651]: I1011 05:09:13.325041 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e85344ea-aeff-4826-bcd0-a4ea659a592c","Type":"ContainerDied","Data":"925e800403fbb9cbdc3a63317b61d075ff4f00baa4f06d486513f69dac15521a"} Oct 11 05:09:13 crc kubenswrapper[4651]: I1011 05:09:13.325051 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e85344ea-aeff-4826-bcd0-a4ea659a592c","Type":"ContainerDied","Data":"fb264949f0255cb352656cf94c7608050521ad3b67f420f1425549dc8ef8b1bf"} Oct 11 05:09:13 crc kubenswrapper[4651]: I1011 05:09:13.577618 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-68976f6bc6-9jl66" Oct 11 05:09:13 crc kubenswrapper[4651]: I1011 05:09:13.688040 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-horizon-tls-certs\") pod \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\" (UID: \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\") " Oct 11 05:09:13 crc kubenswrapper[4651]: I1011 05:09:13.688090 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-scripts\") pod \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\" (UID: \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\") " Oct 11 05:09:13 crc kubenswrapper[4651]: I1011 05:09:13.688129 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-config-data\") pod \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\" (UID: \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\") " Oct 11 05:09:13 crc kubenswrapper[4651]: I1011 05:09:13.688185 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sz4x9\" (UniqueName: \"kubernetes.io/projected/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-kube-api-access-sz4x9\") pod \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\" (UID: \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\") " Oct 11 05:09:13 crc kubenswrapper[4651]: I1011 05:09:13.688255 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-logs\") pod \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\" (UID: \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\") " Oct 11 05:09:13 crc kubenswrapper[4651]: I1011 
05:09:13.688308 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-horizon-secret-key\") pod \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\" (UID: \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\") " Oct 11 05:09:13 crc kubenswrapper[4651]: I1011 05:09:13.688333 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-combined-ca-bundle\") pod \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\" (UID: \"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33\") " Oct 11 05:09:13 crc kubenswrapper[4651]: I1011 05:09:13.688776 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-logs" (OuterVolumeSpecName: "logs") pod "93e6a5fd-218d-44c2-9bf9-2796b1ff0c33" (UID: "93e6a5fd-218d-44c2-9bf9-2796b1ff0c33"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:09:13 crc kubenswrapper[4651]: I1011 05:09:13.693777 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "93e6a5fd-218d-44c2-9bf9-2796b1ff0c33" (UID: "93e6a5fd-218d-44c2-9bf9-2796b1ff0c33"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:09:13 crc kubenswrapper[4651]: I1011 05:09:13.697166 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-kube-api-access-sz4x9" (OuterVolumeSpecName: "kube-api-access-sz4x9") pod "93e6a5fd-218d-44c2-9bf9-2796b1ff0c33" (UID: "93e6a5fd-218d-44c2-9bf9-2796b1ff0c33"). InnerVolumeSpecName "kube-api-access-sz4x9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:09:13 crc kubenswrapper[4651]: I1011 05:09:13.712000 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-config-data" (OuterVolumeSpecName: "config-data") pod "93e6a5fd-218d-44c2-9bf9-2796b1ff0c33" (UID: "93e6a5fd-218d-44c2-9bf9-2796b1ff0c33"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:09:13 crc kubenswrapper[4651]: I1011 05:09:13.713575 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "93e6a5fd-218d-44c2-9bf9-2796b1ff0c33" (UID: "93e6a5fd-218d-44c2-9bf9-2796b1ff0c33"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:09:13 crc kubenswrapper[4651]: I1011 05:09:13.722497 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-scripts" (OuterVolumeSpecName: "scripts") pod "93e6a5fd-218d-44c2-9bf9-2796b1ff0c33" (UID: "93e6a5fd-218d-44c2-9bf9-2796b1ff0c33"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:09:13 crc kubenswrapper[4651]: I1011 05:09:13.743723 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "93e6a5fd-218d-44c2-9bf9-2796b1ff0c33" (UID: "93e6a5fd-218d-44c2-9bf9-2796b1ff0c33"). InnerVolumeSpecName "horizon-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:09:13 crc kubenswrapper[4651]: I1011 05:09:13.790034 4651 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:13 crc kubenswrapper[4651]: I1011 05:09:13.790065 4651 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:13 crc kubenswrapper[4651]: I1011 05:09:13.790074 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:13 crc kubenswrapper[4651]: I1011 05:09:13.790083 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sz4x9\" (UniqueName: \"kubernetes.io/projected/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-kube-api-access-sz4x9\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:13 crc kubenswrapper[4651]: I1011 05:09:13.790093 4651 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-logs\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:13 crc kubenswrapper[4651]: I1011 05:09:13.790101 4651 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:13 crc kubenswrapper[4651]: I1011 05:09:13.790109 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:14 crc kubenswrapper[4651]: I1011 05:09:14.352942 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-68976f6bc6-9jl66" event={"ID":"93e6a5fd-218d-44c2-9bf9-2796b1ff0c33","Type":"ContainerDied","Data":"f5372438287e062570d4dc3eff8b3374de214d326c9efd0c419844b37f3d3141"} Oct 11 05:09:14 crc kubenswrapper[4651]: I1011 05:09:14.353014 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-68976f6bc6-9jl66" Oct 11 05:09:14 crc kubenswrapper[4651]: I1011 05:09:14.353127 4651 scope.go:117] "RemoveContainer" containerID="e808ba8643e65bfa96c6d2b0dba9c582de46d8fb985cecf451cd1bb241335b5b" Oct 11 05:09:14 crc kubenswrapper[4651]: I1011 05:09:14.389944 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-68976f6bc6-9jl66"] Oct 11 05:09:14 crc kubenswrapper[4651]: I1011 05:09:14.405784 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-68976f6bc6-9jl66"] Oct 11 05:09:14 crc kubenswrapper[4651]: I1011 05:09:14.555940 4651 scope.go:117] "RemoveContainer" containerID="321c90e912a2b24f9c5662f28a92c8cc11f803ff3e2b862071cbf01111dbe4be" Oct 11 05:09:14 crc kubenswrapper[4651]: I1011 05:09:14.947860 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 11 05:09:14 crc kubenswrapper[4651]: I1011 05:09:14.948202 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="6d67b157-9258-41f2-b942-bc5a54f1ea21" containerName="glance-log" containerID="cri-o://b35b779a90a8d682f090c62733e4802b34318f19145eca63e6fd686fa49970b1" gracePeriod=30 Oct 11 05:09:14 crc kubenswrapper[4651]: I1011 05:09:14.948336 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="6d67b157-9258-41f2-b942-bc5a54f1ea21" containerName="glance-httpd" containerID="cri-o://ad867022b00e0c78ace5afa4d483a112cefb1a74135162c9bbe0403ebcac754a" gracePeriod=30 Oct 11 05:09:15 crc kubenswrapper[4651]: I1011 05:09:15.367658 4651 generic.go:334] "Generic (PLEG): container finished" podID="6d67b157-9258-41f2-b942-bc5a54f1ea21" containerID="b35b779a90a8d682f090c62733e4802b34318f19145eca63e6fd686fa49970b1" exitCode=143 Oct 11 05:09:15 crc kubenswrapper[4651]: I1011 05:09:15.367748 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6d67b157-9258-41f2-b942-bc5a54f1ea21","Type":"ContainerDied","Data":"b35b779a90a8d682f090c62733e4802b34318f19145eca63e6fd686fa49970b1"} Oct 11 05:09:15 crc kubenswrapper[4651]: I1011 05:09:15.802850 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 11 05:09:15 crc kubenswrapper[4651]: I1011 05:09:15.835076 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e85344ea-aeff-4826-bcd0-a4ea659a592c-combined-ca-bundle\") pod \"e85344ea-aeff-4826-bcd0-a4ea659a592c\" (UID: \"e85344ea-aeff-4826-bcd0-a4ea659a592c\") " Oct 11 05:09:15 crc kubenswrapper[4651]: I1011 05:09:15.835229 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e85344ea-aeff-4826-bcd0-a4ea659a592c-run-httpd\") pod \"e85344ea-aeff-4826-bcd0-a4ea659a592c\" (UID: \"e85344ea-aeff-4826-bcd0-a4ea659a592c\") " Oct 11 05:09:15 crc kubenswrapper[4651]: I1011 05:09:15.835251 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e85344ea-aeff-4826-bcd0-a4ea659a592c-sg-core-conf-yaml\") pod \"e85344ea-aeff-4826-bcd0-a4ea659a592c\" (UID: \"e85344ea-aeff-4826-bcd0-a4ea659a592c\") " Oct 11 05:09:15 crc kubenswrapper[4651]: I1011 05:09:15.835269 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e85344ea-aeff-4826-bcd0-a4ea659a592c-config-data\") pod \"e85344ea-aeff-4826-bcd0-a4ea659a592c\" (UID: \"e85344ea-aeff-4826-bcd0-a4ea659a592c\") " Oct 11 05:09:15 crc kubenswrapper[4651]: I1011 05:09:15.835305 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e85344ea-aeff-4826-bcd0-a4ea659a592c-log-httpd\") pod \"e85344ea-aeff-4826-bcd0-a4ea659a592c\" (UID: \"e85344ea-aeff-4826-bcd0-a4ea659a592c\") " Oct 11 05:09:15 crc kubenswrapper[4651]: I1011 05:09:15.835332 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pzmht\" (UniqueName: \"kubernetes.io/projected/e85344ea-aeff-4826-bcd0-a4ea659a592c-kube-api-access-pzmht\") pod \"e85344ea-aeff-4826-bcd0-a4ea659a592c\" (UID: \"e85344ea-aeff-4826-bcd0-a4ea659a592c\") " Oct 11 05:09:15 crc kubenswrapper[4651]: I1011 05:09:15.835356 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e85344ea-aeff-4826-bcd0-a4ea659a592c-ceilometer-tls-certs\") pod \"e85344ea-aeff-4826-bcd0-a4ea659a592c\" (UID: \"e85344ea-aeff-4826-bcd0-a4ea659a592c\") " Oct 11 05:09:15 crc kubenswrapper[4651]: I1011 05:09:15.835373 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e85344ea-aeff-4826-bcd0-a4ea659a592c-scripts\") pod \"e85344ea-aeff-4826-bcd0-a4ea659a592c\" (UID: \"e85344ea-aeff-4826-bcd0-a4ea659a592c\") " Oct 11 05:09:15 crc kubenswrapper[4651]: I1011 05:09:15.835617 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e85344ea-aeff-4826-bcd0-a4ea659a592c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e85344ea-aeff-4826-bcd0-a4ea659a592c" (UID: "e85344ea-aeff-4826-bcd0-a4ea659a592c"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:09:15 crc kubenswrapper[4651]: I1011 05:09:15.835717 4651 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e85344ea-aeff-4826-bcd0-a4ea659a592c-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:15 crc kubenswrapper[4651]: I1011 05:09:15.835736 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e85344ea-aeff-4826-bcd0-a4ea659a592c-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e85344ea-aeff-4826-bcd0-a4ea659a592c" (UID: "e85344ea-aeff-4826-bcd0-a4ea659a592c"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:09:15 crc kubenswrapper[4651]: I1011 05:09:15.841382 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e85344ea-aeff-4826-bcd0-a4ea659a592c-scripts" (OuterVolumeSpecName: "scripts") pod "e85344ea-aeff-4826-bcd0-a4ea659a592c" (UID: "e85344ea-aeff-4826-bcd0-a4ea659a592c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:09:15 crc kubenswrapper[4651]: I1011 05:09:15.842858 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e85344ea-aeff-4826-bcd0-a4ea659a592c-kube-api-access-pzmht" (OuterVolumeSpecName: "kube-api-access-pzmht") pod "e85344ea-aeff-4826-bcd0-a4ea659a592c" (UID: "e85344ea-aeff-4826-bcd0-a4ea659a592c"). InnerVolumeSpecName "kube-api-access-pzmht". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:09:15 crc kubenswrapper[4651]: I1011 05:09:15.888659 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="93e6a5fd-218d-44c2-9bf9-2796b1ff0c33" path="/var/lib/kubelet/pods/93e6a5fd-218d-44c2-9bf9-2796b1ff0c33/volumes" Oct 11 05:09:15 crc kubenswrapper[4651]: I1011 05:09:15.889679 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e85344ea-aeff-4826-bcd0-a4ea659a592c-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e85344ea-aeff-4826-bcd0-a4ea659a592c" (UID: "e85344ea-aeff-4826-bcd0-a4ea659a592c"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:09:15 crc kubenswrapper[4651]: I1011 05:09:15.896033 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e85344ea-aeff-4826-bcd0-a4ea659a592c-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "e85344ea-aeff-4826-bcd0-a4ea659a592c" (UID: "e85344ea-aeff-4826-bcd0-a4ea659a592c"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:09:15 crc kubenswrapper[4651]: I1011 05:09:15.932425 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e85344ea-aeff-4826-bcd0-a4ea659a592c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e85344ea-aeff-4826-bcd0-a4ea659a592c" (UID: "e85344ea-aeff-4826-bcd0-a4ea659a592c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:09:15 crc kubenswrapper[4651]: I1011 05:09:15.937968 4651 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e85344ea-aeff-4826-bcd0-a4ea659a592c-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:15 crc kubenswrapper[4651]: I1011 05:09:15.937993 4651 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e85344ea-aeff-4826-bcd0-a4ea659a592c-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:15 crc kubenswrapper[4651]: I1011 05:09:15.938002 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pzmht\" (UniqueName: \"kubernetes.io/projected/e85344ea-aeff-4826-bcd0-a4ea659a592c-kube-api-access-pzmht\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:15 crc kubenswrapper[4651]: I1011 05:09:15.938012 4651 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e85344ea-aeff-4826-bcd0-a4ea659a592c-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:15 crc kubenswrapper[4651]: I1011 05:09:15.938020 4651 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e85344ea-aeff-4826-bcd0-a4ea659a592c-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:15 crc kubenswrapper[4651]: I1011 05:09:15.938028 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e85344ea-aeff-4826-bcd0-a4ea659a592c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:15 crc kubenswrapper[4651]: I1011 05:09:15.974277 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e85344ea-aeff-4826-bcd0-a4ea659a592c-config-data" (OuterVolumeSpecName: "config-data") pod "e85344ea-aeff-4826-bcd0-a4ea659a592c" (UID: "e85344ea-aeff-4826-bcd0-a4ea659a592c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.040357 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e85344ea-aeff-4826-bcd0-a4ea659a592c-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.378262 4651 generic.go:334] "Generic (PLEG): container finished" podID="e85344ea-aeff-4826-bcd0-a4ea659a592c" containerID="c1f0e810c70514e3326d0ce0ca8ce2e3ab173b430557087c1005f87d442e022c" exitCode=0 Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.378311 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e85344ea-aeff-4826-bcd0-a4ea659a592c","Type":"ContainerDied","Data":"c1f0e810c70514e3326d0ce0ca8ce2e3ab173b430557087c1005f87d442e022c"} Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.378351 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.378376 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e85344ea-aeff-4826-bcd0-a4ea659a592c","Type":"ContainerDied","Data":"b70fead4ab177ec3596918380087614dcad4b1b0a5962362be9c155ae2409e32"} Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.378395 4651 scope.go:117] "RemoveContainer" containerID="aa2590bee7fb46cda2d6db3578c059b4dd24d7432aac82d817e38fe23cec4afe" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.399750 4651 scope.go:117] "RemoveContainer" containerID="925e800403fbb9cbdc3a63317b61d075ff4f00baa4f06d486513f69dac15521a" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.425757 4651 scope.go:117] "RemoveContainer" containerID="fb264949f0255cb352656cf94c7608050521ad3b67f420f1425549dc8ef8b1bf" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.428930 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.439629 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.451857 4651 scope.go:117] "RemoveContainer" containerID="c1f0e810c70514e3326d0ce0ca8ce2e3ab173b430557087c1005f87d442e022c" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.456870 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 11 05:09:16 crc kubenswrapper[4651]: E1011 05:09:16.457329 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93e6a5fd-218d-44c2-9bf9-2796b1ff0c33" containerName="horizon-log" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.457351 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="93e6a5fd-218d-44c2-9bf9-2796b1ff0c33" containerName="horizon-log" Oct 11 05:09:16 crc kubenswrapper[4651]: E1011 05:09:16.457377 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e85344ea-aeff-4826-bcd0-a4ea659a592c" containerName="ceilometer-notification-agent" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.457387 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="e85344ea-aeff-4826-bcd0-a4ea659a592c" containerName="ceilometer-notification-agent" Oct 11 05:09:16 crc kubenswrapper[4651]: E1011 05:09:16.457434 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e85344ea-aeff-4826-bcd0-a4ea659a592c" containerName="proxy-httpd" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.457443 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="e85344ea-aeff-4826-bcd0-a4ea659a592c" containerName="proxy-httpd" Oct 11 05:09:16 crc kubenswrapper[4651]: E1011 05:09:16.457463 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93e6a5fd-218d-44c2-9bf9-2796b1ff0c33" containerName="horizon" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.457473 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="93e6a5fd-218d-44c2-9bf9-2796b1ff0c33" containerName="horizon" Oct 11 05:09:16 crc kubenswrapper[4651]: E1011 05:09:16.457518 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e85344ea-aeff-4826-bcd0-a4ea659a592c" containerName="sg-core" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.457529 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="e85344ea-aeff-4826-bcd0-a4ea659a592c" containerName="sg-core" Oct 11 05:09:16 crc kubenswrapper[4651]: E1011 05:09:16.457548 4651 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e85344ea-aeff-4826-bcd0-a4ea659a592c" containerName="ceilometer-central-agent" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.457584 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="e85344ea-aeff-4826-bcd0-a4ea659a592c" containerName="ceilometer-central-agent" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.457886 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="93e6a5fd-218d-44c2-9bf9-2796b1ff0c33" containerName="horizon" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.457908 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="e85344ea-aeff-4826-bcd0-a4ea659a592c" containerName="sg-core" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.457931 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="e85344ea-aeff-4826-bcd0-a4ea659a592c" containerName="ceilometer-notification-agent" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.457947 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="e85344ea-aeff-4826-bcd0-a4ea659a592c" containerName="proxy-httpd" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.457965 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="e85344ea-aeff-4826-bcd0-a4ea659a592c" containerName="ceilometer-central-agent" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.457981 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="93e6a5fd-218d-44c2-9bf9-2796b1ff0c33" containerName="horizon-log" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.460905 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.463293 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.463549 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.463680 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.464453 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.504520 4651 scope.go:117] "RemoveContainer" containerID="aa2590bee7fb46cda2d6db3578c059b4dd24d7432aac82d817e38fe23cec4afe" Oct 11 05:09:16 crc kubenswrapper[4651]: E1011 05:09:16.505498 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa2590bee7fb46cda2d6db3578c059b4dd24d7432aac82d817e38fe23cec4afe\": container with ID starting with aa2590bee7fb46cda2d6db3578c059b4dd24d7432aac82d817e38fe23cec4afe not found: ID does not exist" containerID="aa2590bee7fb46cda2d6db3578c059b4dd24d7432aac82d817e38fe23cec4afe" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.505557 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa2590bee7fb46cda2d6db3578c059b4dd24d7432aac82d817e38fe23cec4afe"} err="failed to get container status \"aa2590bee7fb46cda2d6db3578c059b4dd24d7432aac82d817e38fe23cec4afe\": rpc error: code = NotFound desc = could not find container \"aa2590bee7fb46cda2d6db3578c059b4dd24d7432aac82d817e38fe23cec4afe\": container with ID starting with 
aa2590bee7fb46cda2d6db3578c059b4dd24d7432aac82d817e38fe23cec4afe not found: ID does not exist" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.505587 4651 scope.go:117] "RemoveContainer" containerID="925e800403fbb9cbdc3a63317b61d075ff4f00baa4f06d486513f69dac15521a" Oct 11 05:09:16 crc kubenswrapper[4651]: E1011 05:09:16.506949 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"925e800403fbb9cbdc3a63317b61d075ff4f00baa4f06d486513f69dac15521a\": container with ID starting with 925e800403fbb9cbdc3a63317b61d075ff4f00baa4f06d486513f69dac15521a not found: ID does not exist" containerID="925e800403fbb9cbdc3a63317b61d075ff4f00baa4f06d486513f69dac15521a" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.506989 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"925e800403fbb9cbdc3a63317b61d075ff4f00baa4f06d486513f69dac15521a"} err="failed to get container status \"925e800403fbb9cbdc3a63317b61d075ff4f00baa4f06d486513f69dac15521a\": rpc error: code = NotFound desc = could not find container \"925e800403fbb9cbdc3a63317b61d075ff4f00baa4f06d486513f69dac15521a\": container with ID starting with 925e800403fbb9cbdc3a63317b61d075ff4f00baa4f06d486513f69dac15521a not found: ID does not exist" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.507015 4651 scope.go:117] "RemoveContainer" containerID="fb264949f0255cb352656cf94c7608050521ad3b67f420f1425549dc8ef8b1bf" Oct 11 05:09:16 crc kubenswrapper[4651]: E1011 05:09:16.507551 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb264949f0255cb352656cf94c7608050521ad3b67f420f1425549dc8ef8b1bf\": container with ID starting with fb264949f0255cb352656cf94c7608050521ad3b67f420f1425549dc8ef8b1bf not found: ID does not exist" containerID="fb264949f0255cb352656cf94c7608050521ad3b67f420f1425549dc8ef8b1bf" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.507591 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb264949f0255cb352656cf94c7608050521ad3b67f420f1425549dc8ef8b1bf"} err="failed to get container status \"fb264949f0255cb352656cf94c7608050521ad3b67f420f1425549dc8ef8b1bf\": rpc error: code = NotFound desc = could not find container \"fb264949f0255cb352656cf94c7608050521ad3b67f420f1425549dc8ef8b1bf\": container with ID starting with fb264949f0255cb352656cf94c7608050521ad3b67f420f1425549dc8ef8b1bf not found: ID does not exist" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.507617 4651 scope.go:117] "RemoveContainer" containerID="c1f0e810c70514e3326d0ce0ca8ce2e3ab173b430557087c1005f87d442e022c" Oct 11 05:09:16 crc kubenswrapper[4651]: E1011 05:09:16.508040 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1f0e810c70514e3326d0ce0ca8ce2e3ab173b430557087c1005f87d442e022c\": container with ID starting with c1f0e810c70514e3326d0ce0ca8ce2e3ab173b430557087c1005f87d442e022c not found: ID does not exist" containerID="c1f0e810c70514e3326d0ce0ca8ce2e3ab173b430557087c1005f87d442e022c" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.508090 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1f0e810c70514e3326d0ce0ca8ce2e3ab173b430557087c1005f87d442e022c"} err="failed to get container status \"c1f0e810c70514e3326d0ce0ca8ce2e3ab173b430557087c1005f87d442e022c\": rpc 
error: code = NotFound desc = could not find container \"c1f0e810c70514e3326d0ce0ca8ce2e3ab173b430557087c1005f87d442e022c\": container with ID starting with c1f0e810c70514e3326d0ce0ca8ce2e3ab173b430557087c1005f87d442e022c not found: ID does not exist" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.548041 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d85f94e-09fd-4655-a708-0ff3a88e3c35-config-data\") pod \"ceilometer-0\" (UID: \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\") " pod="openstack/ceilometer-0" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.548127 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8d85f94e-09fd-4655-a708-0ff3a88e3c35-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\") " pod="openstack/ceilometer-0" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.548236 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c28t4\" (UniqueName: \"kubernetes.io/projected/8d85f94e-09fd-4655-a708-0ff3a88e3c35-kube-api-access-c28t4\") pod \"ceilometer-0\" (UID: \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\") " pod="openstack/ceilometer-0" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.548282 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d85f94e-09fd-4655-a708-0ff3a88e3c35-log-httpd\") pod \"ceilometer-0\" (UID: \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\") " pod="openstack/ceilometer-0" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.548314 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d85f94e-09fd-4655-a708-0ff3a88e3c35-scripts\") pod \"ceilometer-0\" (UID: \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\") " pod="openstack/ceilometer-0" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.548449 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d85f94e-09fd-4655-a708-0ff3a88e3c35-run-httpd\") pod \"ceilometer-0\" (UID: \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\") " pod="openstack/ceilometer-0" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.548499 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d85f94e-09fd-4655-a708-0ff3a88e3c35-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\") " pod="openstack/ceilometer-0" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.548544 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d85f94e-09fd-4655-a708-0ff3a88e3c35-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\") " pod="openstack/ceilometer-0" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.650256 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d85f94e-09fd-4655-a708-0ff3a88e3c35-scripts\") pod \"ceilometer-0\" (UID: \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\") 
" pod="openstack/ceilometer-0" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.650402 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d85f94e-09fd-4655-a708-0ff3a88e3c35-run-httpd\") pod \"ceilometer-0\" (UID: \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\") " pod="openstack/ceilometer-0" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.650435 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d85f94e-09fd-4655-a708-0ff3a88e3c35-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\") " pod="openstack/ceilometer-0" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.650472 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d85f94e-09fd-4655-a708-0ff3a88e3c35-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\") " pod="openstack/ceilometer-0" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.650528 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d85f94e-09fd-4655-a708-0ff3a88e3c35-config-data\") pod \"ceilometer-0\" (UID: \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\") " pod="openstack/ceilometer-0" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.650552 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8d85f94e-09fd-4655-a708-0ff3a88e3c35-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\") " pod="openstack/ceilometer-0" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.650596 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c28t4\" (UniqueName: \"kubernetes.io/projected/8d85f94e-09fd-4655-a708-0ff3a88e3c35-kube-api-access-c28t4\") pod \"ceilometer-0\" (UID: \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\") " pod="openstack/ceilometer-0" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.650626 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d85f94e-09fd-4655-a708-0ff3a88e3c35-log-httpd\") pod \"ceilometer-0\" (UID: \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\") " pod="openstack/ceilometer-0" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.651149 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d85f94e-09fd-4655-a708-0ff3a88e3c35-run-httpd\") pod \"ceilometer-0\" (UID: \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\") " pod="openstack/ceilometer-0" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.651168 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d85f94e-09fd-4655-a708-0ff3a88e3c35-log-httpd\") pod \"ceilometer-0\" (UID: \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\") " pod="openstack/ceilometer-0" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.653959 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d85f94e-09fd-4655-a708-0ff3a88e3c35-scripts\") pod \"ceilometer-0\" (UID: \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\") " pod="openstack/ceilometer-0" Oct 11 
05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.654793 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8d85f94e-09fd-4655-a708-0ff3a88e3c35-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\") " pod="openstack/ceilometer-0" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.658265 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d85f94e-09fd-4655-a708-0ff3a88e3c35-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\") " pod="openstack/ceilometer-0" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.662304 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d85f94e-09fd-4655-a708-0ff3a88e3c35-config-data\") pod \"ceilometer-0\" (UID: \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\") " pod="openstack/ceilometer-0" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.666710 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d85f94e-09fd-4655-a708-0ff3a88e3c35-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\") " pod="openstack/ceilometer-0" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.673102 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c28t4\" (UniqueName: \"kubernetes.io/projected/8d85f94e-09fd-4655-a708-0ff3a88e3c35-kube-api-access-c28t4\") pod \"ceilometer-0\" (UID: \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\") " pod="openstack/ceilometer-0" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.819313 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.845844 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.846118 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="5c04eb41-eb14-4687-a0fa-56f07612da15" containerName="glance-log" containerID="cri-o://dc526a3f9f7a83761b341c698b7ac5c3eea9e0a70fbc06131f5b4708b2e6dd8d" gracePeriod=30 Oct 11 05:09:16 crc kubenswrapper[4651]: I1011 05:09:16.846580 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="5c04eb41-eb14-4687-a0fa-56f07612da15" containerName="glance-httpd" containerID="cri-o://1510c5a0e22592e06028411eccbbe08cd69f4981baf82da22394ea597b956109" gracePeriod=30 Oct 11 05:09:17 crc kubenswrapper[4651]: I1011 05:09:17.269076 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-stlvp"] Oct 11 05:09:17 crc kubenswrapper[4651]: I1011 05:09:17.270894 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-stlvp" Oct 11 05:09:17 crc kubenswrapper[4651]: I1011 05:09:17.290041 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-stlvp"] Oct 11 05:09:17 crc kubenswrapper[4651]: I1011 05:09:17.322942 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 11 05:09:17 crc kubenswrapper[4651]: I1011 05:09:17.360810 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-p2nff"] Oct 11 05:09:17 crc kubenswrapper[4651]: I1011 05:09:17.361990 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-p2nff" Oct 11 05:09:17 crc kubenswrapper[4651]: I1011 05:09:17.373656 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-p2nff"] Oct 11 05:09:17 crc kubenswrapper[4651]: I1011 05:09:17.377953 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zh7xk\" (UniqueName: \"kubernetes.io/projected/1bbe8f4f-c85e-46aa-a214-e28fad1722dc-kube-api-access-zh7xk\") pod \"nova-api-db-create-stlvp\" (UID: \"1bbe8f4f-c85e-46aa-a214-e28fad1722dc\") " pod="openstack/nova-api-db-create-stlvp" Oct 11 05:09:17 crc kubenswrapper[4651]: I1011 05:09:17.392251 4651 generic.go:334] "Generic (PLEG): container finished" podID="5c04eb41-eb14-4687-a0fa-56f07612da15" containerID="dc526a3f9f7a83761b341c698b7ac5c3eea9e0a70fbc06131f5b4708b2e6dd8d" exitCode=143 Oct 11 05:09:17 crc kubenswrapper[4651]: I1011 05:09:17.392317 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5c04eb41-eb14-4687-a0fa-56f07612da15","Type":"ContainerDied","Data":"dc526a3f9f7a83761b341c698b7ac5c3eea9e0a70fbc06131f5b4708b2e6dd8d"} Oct 11 05:09:17 crc kubenswrapper[4651]: I1011 05:09:17.395514 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d85f94e-09fd-4655-a708-0ff3a88e3c35","Type":"ContainerStarted","Data":"96e68981b57f75cf77d1b090be3b1601d09636e62ed183aef6ee79e0da208a9d"} Oct 11 05:09:17 crc kubenswrapper[4651]: I1011 05:09:17.458335 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-vjf9n"] Oct 11 05:09:17 crc kubenswrapper[4651]: I1011 05:09:17.460476 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-vjf9n" Oct 11 05:09:17 crc kubenswrapper[4651]: I1011 05:09:17.468647 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-vjf9n"] Oct 11 05:09:17 crc kubenswrapper[4651]: I1011 05:09:17.481945 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zh7xk\" (UniqueName: \"kubernetes.io/projected/1bbe8f4f-c85e-46aa-a214-e28fad1722dc-kube-api-access-zh7xk\") pod \"nova-api-db-create-stlvp\" (UID: \"1bbe8f4f-c85e-46aa-a214-e28fad1722dc\") " pod="openstack/nova-api-db-create-stlvp" Oct 11 05:09:17 crc kubenswrapper[4651]: I1011 05:09:17.482032 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-srpzt\" (UniqueName: \"kubernetes.io/projected/3cb7ede0-6fea-4867-941e-13a4c5637543-kube-api-access-srpzt\") pod \"nova-cell0-db-create-p2nff\" (UID: \"3cb7ede0-6fea-4867-941e-13a4c5637543\") " pod="openstack/nova-cell0-db-create-p2nff" Oct 11 05:09:17 crc kubenswrapper[4651]: I1011 05:09:17.504876 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zh7xk\" (UniqueName: \"kubernetes.io/projected/1bbe8f4f-c85e-46aa-a214-e28fad1722dc-kube-api-access-zh7xk\") pod \"nova-api-db-create-stlvp\" (UID: \"1bbe8f4f-c85e-46aa-a214-e28fad1722dc\") " pod="openstack/nova-api-db-create-stlvp" Oct 11 05:09:17 crc kubenswrapper[4651]: I1011 05:09:17.583548 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gm86v\" (UniqueName: \"kubernetes.io/projected/b608bb15-f67e-4192-b820-29752a3cd443-kube-api-access-gm86v\") pod \"nova-cell1-db-create-vjf9n\" (UID: \"b608bb15-f67e-4192-b820-29752a3cd443\") " pod="openstack/nova-cell1-db-create-vjf9n" Oct 11 05:09:17 crc kubenswrapper[4651]: I1011 05:09:17.583629 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-srpzt\" (UniqueName: \"kubernetes.io/projected/3cb7ede0-6fea-4867-941e-13a4c5637543-kube-api-access-srpzt\") pod \"nova-cell0-db-create-p2nff\" (UID: \"3cb7ede0-6fea-4867-941e-13a4c5637543\") " pod="openstack/nova-cell0-db-create-p2nff" Oct 11 05:09:17 crc kubenswrapper[4651]: I1011 05:09:17.598622 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-srpzt\" (UniqueName: \"kubernetes.io/projected/3cb7ede0-6fea-4867-941e-13a4c5637543-kube-api-access-srpzt\") pod \"nova-cell0-db-create-p2nff\" (UID: \"3cb7ede0-6fea-4867-941e-13a4c5637543\") " pod="openstack/nova-cell0-db-create-p2nff" Oct 11 05:09:17 crc kubenswrapper[4651]: I1011 05:09:17.626551 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-stlvp" Oct 11 05:09:17 crc kubenswrapper[4651]: I1011 05:09:17.681994 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-p2nff" Oct 11 05:09:17 crc kubenswrapper[4651]: I1011 05:09:17.682985 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Oct 11 05:09:17 crc kubenswrapper[4651]: I1011 05:09:17.684909 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gm86v\" (UniqueName: \"kubernetes.io/projected/b608bb15-f67e-4192-b820-29752a3cd443-kube-api-access-gm86v\") pod \"nova-cell1-db-create-vjf9n\" (UID: \"b608bb15-f67e-4192-b820-29752a3cd443\") " pod="openstack/nova-cell1-db-create-vjf9n" Oct 11 05:09:17 crc kubenswrapper[4651]: I1011 05:09:17.709488 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gm86v\" (UniqueName: \"kubernetes.io/projected/b608bb15-f67e-4192-b820-29752a3cd443-kube-api-access-gm86v\") pod \"nova-cell1-db-create-vjf9n\" (UID: \"b608bb15-f67e-4192-b820-29752a3cd443\") " pod="openstack/nova-cell1-db-create-vjf9n" Oct 11 05:09:17 crc kubenswrapper[4651]: I1011 05:09:17.776155 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-vjf9n" Oct 11 05:09:17 crc kubenswrapper[4651]: I1011 05:09:17.882616 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e85344ea-aeff-4826-bcd0-a4ea659a592c" path="/var/lib/kubelet/pods/e85344ea-aeff-4826-bcd0-a4ea659a592c/volumes" Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.131624 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-stlvp"] Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.220348 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-p2nff"] Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.280382 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-vjf9n"] Oct 11 05:09:18 crc kubenswrapper[4651]: W1011 05:09:18.288976 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb608bb15_f67e_4192_b820_29752a3cd443.slice/crio-27824a64591ea21fe61e73bf7f244425ee6fe86c2a6e63052d95d61308b077a7 WatchSource:0}: Error finding container 27824a64591ea21fe61e73bf7f244425ee6fe86c2a6e63052d95d61308b077a7: Status 404 returned error can't find the container with id 27824a64591ea21fe61e73bf7f244425ee6fe86c2a6e63052d95d61308b077a7 Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.419125 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-p2nff" event={"ID":"3cb7ede0-6fea-4867-941e-13a4c5637543","Type":"ContainerStarted","Data":"17c260016cab529da5c1f8a84035c3d8f47d0277c3ac62d9d57ffd0d569f9e3c"} Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.420853 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-stlvp" event={"ID":"1bbe8f4f-c85e-46aa-a214-e28fad1722dc","Type":"ContainerStarted","Data":"879ace51e67888bdbba03be782d5876c7352a92c3019cb8dac3d0010a9289e02"} Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.422406 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d85f94e-09fd-4655-a708-0ff3a88e3c35","Type":"ContainerStarted","Data":"ada35ed2843e8625fb4507f6a29a52188baee8423ccdea3342aba8b95de44a57"} Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.423513 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell1-db-create-vjf9n" event={"ID":"b608bb15-f67e-4192-b820-29752a3cd443","Type":"ContainerStarted","Data":"27824a64591ea21fe61e73bf7f244425ee6fe86c2a6e63052d95d61308b077a7"} Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.425407 4651 generic.go:334] "Generic (PLEG): container finished" podID="6d67b157-9258-41f2-b942-bc5a54f1ea21" containerID="ad867022b00e0c78ace5afa4d483a112cefb1a74135162c9bbe0403ebcac754a" exitCode=0 Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.425453 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6d67b157-9258-41f2-b942-bc5a54f1ea21","Type":"ContainerDied","Data":"ad867022b00e0c78ace5afa4d483a112cefb1a74135162c9bbe0403ebcac754a"} Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.551246 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.603793 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6d67b157-9258-41f2-b942-bc5a54f1ea21-public-tls-certs\") pod \"6d67b157-9258-41f2-b942-bc5a54f1ea21\" (UID: \"6d67b157-9258-41f2-b942-bc5a54f1ea21\") " Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.604061 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fp9p5\" (UniqueName: \"kubernetes.io/projected/6d67b157-9258-41f2-b942-bc5a54f1ea21-kube-api-access-fp9p5\") pod \"6d67b157-9258-41f2-b942-bc5a54f1ea21\" (UID: \"6d67b157-9258-41f2-b942-bc5a54f1ea21\") " Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.604165 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"6d67b157-9258-41f2-b942-bc5a54f1ea21\" (UID: \"6d67b157-9258-41f2-b942-bc5a54f1ea21\") " Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.604192 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6d67b157-9258-41f2-b942-bc5a54f1ea21-logs\") pod \"6d67b157-9258-41f2-b942-bc5a54f1ea21\" (UID: \"6d67b157-9258-41f2-b942-bc5a54f1ea21\") " Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.604213 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d67b157-9258-41f2-b942-bc5a54f1ea21-config-data\") pod \"6d67b157-9258-41f2-b942-bc5a54f1ea21\" (UID: \"6d67b157-9258-41f2-b942-bc5a54f1ea21\") " Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.604369 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d67b157-9258-41f2-b942-bc5a54f1ea21-combined-ca-bundle\") pod \"6d67b157-9258-41f2-b942-bc5a54f1ea21\" (UID: \"6d67b157-9258-41f2-b942-bc5a54f1ea21\") " Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.604401 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d67b157-9258-41f2-b942-bc5a54f1ea21-scripts\") pod \"6d67b157-9258-41f2-b942-bc5a54f1ea21\" (UID: \"6d67b157-9258-41f2-b942-bc5a54f1ea21\") " Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.604429 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/6d67b157-9258-41f2-b942-bc5a54f1ea21-httpd-run\") pod \"6d67b157-9258-41f2-b942-bc5a54f1ea21\" (UID: \"6d67b157-9258-41f2-b942-bc5a54f1ea21\") " Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.605469 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6d67b157-9258-41f2-b942-bc5a54f1ea21-logs" (OuterVolumeSpecName: "logs") pod "6d67b157-9258-41f2-b942-bc5a54f1ea21" (UID: "6d67b157-9258-41f2-b942-bc5a54f1ea21"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.606691 4651 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6d67b157-9258-41f2-b942-bc5a54f1ea21-logs\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.606945 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6d67b157-9258-41f2-b942-bc5a54f1ea21-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "6d67b157-9258-41f2-b942-bc5a54f1ea21" (UID: "6d67b157-9258-41f2-b942-bc5a54f1ea21"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.618882 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "6d67b157-9258-41f2-b942-bc5a54f1ea21" (UID: "6d67b157-9258-41f2-b942-bc5a54f1ea21"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.619022 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d67b157-9258-41f2-b942-bc5a54f1ea21-kube-api-access-fp9p5" (OuterVolumeSpecName: "kube-api-access-fp9p5") pod "6d67b157-9258-41f2-b942-bc5a54f1ea21" (UID: "6d67b157-9258-41f2-b942-bc5a54f1ea21"). InnerVolumeSpecName "kube-api-access-fp9p5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.623373 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d67b157-9258-41f2-b942-bc5a54f1ea21-scripts" (OuterVolumeSpecName: "scripts") pod "6d67b157-9258-41f2-b942-bc5a54f1ea21" (UID: "6d67b157-9258-41f2-b942-bc5a54f1ea21"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.643004 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d67b157-9258-41f2-b942-bc5a54f1ea21-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6d67b157-9258-41f2-b942-bc5a54f1ea21" (UID: "6d67b157-9258-41f2-b942-bc5a54f1ea21"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.666632 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d67b157-9258-41f2-b942-bc5a54f1ea21-config-data" (OuterVolumeSpecName: "config-data") pod "6d67b157-9258-41f2-b942-bc5a54f1ea21" (UID: "6d67b157-9258-41f2-b942-bc5a54f1ea21"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.679607 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d67b157-9258-41f2-b942-bc5a54f1ea21-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "6d67b157-9258-41f2-b942-bc5a54f1ea21" (UID: "6d67b157-9258-41f2-b942-bc5a54f1ea21"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.708296 4651 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6d67b157-9258-41f2-b942-bc5a54f1ea21-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.708329 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fp9p5\" (UniqueName: \"kubernetes.io/projected/6d67b157-9258-41f2-b942-bc5a54f1ea21-kube-api-access-fp9p5\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.708372 4651 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.708386 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d67b157-9258-41f2-b942-bc5a54f1ea21-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.708398 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d67b157-9258-41f2-b942-bc5a54f1ea21-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.708410 4651 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d67b157-9258-41f2-b942-bc5a54f1ea21-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.708419 4651 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6d67b157-9258-41f2-b942-bc5a54f1ea21-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.728950 4651 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.810258 4651 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:18 crc kubenswrapper[4651]: I1011 05:09:18.951393 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.435780 4651 generic.go:334] "Generic (PLEG): container finished" podID="b608bb15-f67e-4192-b820-29752a3cd443" containerID="7ee629052e142cfd4778cbf4575b9ee3e1b2d3ac56c7f8a3791f76059068b59a" exitCode=0 Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.436168 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-vjf9n" event={"ID":"b608bb15-f67e-4192-b820-29752a3cd443","Type":"ContainerDied","Data":"7ee629052e142cfd4778cbf4575b9ee3e1b2d3ac56c7f8a3791f76059068b59a"} Oct 11 05:09:19 crc 
kubenswrapper[4651]: I1011 05:09:19.438590 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6d67b157-9258-41f2-b942-bc5a54f1ea21","Type":"ContainerDied","Data":"c24aef318ac682de54e21143be63340dc533dc25aa895d7f3e56c5ab5b8c0108"} Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.438627 4651 scope.go:117] "RemoveContainer" containerID="ad867022b00e0c78ace5afa4d483a112cefb1a74135162c9bbe0403ebcac754a" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.438788 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.451621 4651 generic.go:334] "Generic (PLEG): container finished" podID="3cb7ede0-6fea-4867-941e-13a4c5637543" containerID="b3b5369c0a55289cfcc8db9cbad784bee96a7dc94e001fb91bd38773edb31c66" exitCode=0 Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.451713 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-p2nff" event={"ID":"3cb7ede0-6fea-4867-941e-13a4c5637543","Type":"ContainerDied","Data":"b3b5369c0a55289cfcc8db9cbad784bee96a7dc94e001fb91bd38773edb31c66"} Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.457402 4651 generic.go:334] "Generic (PLEG): container finished" podID="1bbe8f4f-c85e-46aa-a214-e28fad1722dc" containerID="055f0a58a9b908f88f1f6c166107ddd0debed02cd259082260e4f4c97f8a9c88" exitCode=0 Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.457486 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-stlvp" event={"ID":"1bbe8f4f-c85e-46aa-a214-e28fad1722dc","Type":"ContainerDied","Data":"055f0a58a9b908f88f1f6c166107ddd0debed02cd259082260e4f4c97f8a9c88"} Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.461022 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d85f94e-09fd-4655-a708-0ff3a88e3c35","Type":"ContainerStarted","Data":"5aac2f5b65caacf570d31883365d712a9c8e515d3e45b7af4580dce080daf218"} Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.461055 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d85f94e-09fd-4655-a708-0ff3a88e3c35","Type":"ContainerStarted","Data":"fac62b9075fd04a29b5e17546553586969e4a45b32c113f37b1faeea7d210571"} Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.470241 4651 scope.go:117] "RemoveContainer" containerID="b35b779a90a8d682f090c62733e4802b34318f19145eca63e6fd686fa49970b1" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.490099 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.504029 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.569088 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 11 05:09:19 crc kubenswrapper[4651]: E1011 05:09:19.569714 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d67b157-9258-41f2-b942-bc5a54f1ea21" containerName="glance-log" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.569734 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d67b157-9258-41f2-b942-bc5a54f1ea21" containerName="glance-log" Oct 11 05:09:19 crc kubenswrapper[4651]: E1011 05:09:19.569756 4651 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="6d67b157-9258-41f2-b942-bc5a54f1ea21" containerName="glance-httpd" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.569763 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d67b157-9258-41f2-b942-bc5a54f1ea21" containerName="glance-httpd" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.570169 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d67b157-9258-41f2-b942-bc5a54f1ea21" containerName="glance-httpd" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.570198 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d67b157-9258-41f2-b942-bc5a54f1ea21" containerName="glance-log" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.573161 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.577322 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.577588 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.609249 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.627963 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jh882\" (UniqueName: \"kubernetes.io/projected/ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f-kube-api-access-jh882\") pod \"glance-default-external-api-0\" (UID: \"ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f\") " pod="openstack/glance-default-external-api-0" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.628020 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f-logs\") pod \"glance-default-external-api-0\" (UID: \"ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f\") " pod="openstack/glance-default-external-api-0" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.628056 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f-config-data\") pod \"glance-default-external-api-0\" (UID: \"ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f\") " pod="openstack/glance-default-external-api-0" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.628090 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f\") " pod="openstack/glance-default-external-api-0" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.628119 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f\") " pod="openstack/glance-default-external-api-0" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.628161 4651 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f-scripts\") pod \"glance-default-external-api-0\" (UID: \"ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f\") " pod="openstack/glance-default-external-api-0" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.628181 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f\") " pod="openstack/glance-default-external-api-0" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.628221 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f\") " pod="openstack/glance-default-external-api-0" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.730065 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f\") " pod="openstack/glance-default-external-api-0" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.730159 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f-scripts\") pod \"glance-default-external-api-0\" (UID: \"ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f\") " pod="openstack/glance-default-external-api-0" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.730194 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f\") " pod="openstack/glance-default-external-api-0" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.730276 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f\") " pod="openstack/glance-default-external-api-0" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.730697 4651 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.730857 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jh882\" (UniqueName: \"kubernetes.io/projected/ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f-kube-api-access-jh882\") pod \"glance-default-external-api-0\" (UID: \"ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f\") " pod="openstack/glance-default-external-api-0" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.730910 4651 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f-logs\") pod \"glance-default-external-api-0\" (UID: \"ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f\") " pod="openstack/glance-default-external-api-0" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.730951 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f-config-data\") pod \"glance-default-external-api-0\" (UID: \"ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f\") " pod="openstack/glance-default-external-api-0" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.730960 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f\") " pod="openstack/glance-default-external-api-0" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.731026 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f\") " pod="openstack/glance-default-external-api-0" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.731751 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f-logs\") pod \"glance-default-external-api-0\" (UID: \"ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f\") " pod="openstack/glance-default-external-api-0" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.735447 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f-scripts\") pod \"glance-default-external-api-0\" (UID: \"ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f\") " pod="openstack/glance-default-external-api-0" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.739730 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f\") " pod="openstack/glance-default-external-api-0" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.739936 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f-config-data\") pod \"glance-default-external-api-0\" (UID: \"ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f\") " pod="openstack/glance-default-external-api-0" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.748002 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f\") " pod="openstack/glance-default-external-api-0" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.751250 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jh882\" (UniqueName: \"kubernetes.io/projected/ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f-kube-api-access-jh882\") pod 
\"glance-default-external-api-0\" (UID: \"ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f\") " pod="openstack/glance-default-external-api-0" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.764704 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f\") " pod="openstack/glance-default-external-api-0" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.901076 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 11 05:09:19 crc kubenswrapper[4651]: I1011 05:09:19.926011 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d67b157-9258-41f2-b942-bc5a54f1ea21" path="/var/lib/kubelet/pods/6d67b157-9258-41f2-b942-bc5a54f1ea21/volumes" Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.034206 4651 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="5c04eb41-eb14-4687-a0fa-56f07612da15" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.157:9292/healthcheck\": read tcp 10.217.0.2:37896->10.217.0.157:9292: read: connection reset by peer" Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.034942 4651 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="5c04eb41-eb14-4687-a0fa-56f07612da15" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.157:9292/healthcheck\": read tcp 10.217.0.2:37898->10.217.0.157:9292: read: connection reset by peer" Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.476206 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.486177 4651 generic.go:334] "Generic (PLEG): container finished" podID="5c04eb41-eb14-4687-a0fa-56f07612da15" containerID="1510c5a0e22592e06028411eccbbe08cd69f4981baf82da22394ea597b956109" exitCode=0 Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.486263 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5c04eb41-eb14-4687-a0fa-56f07612da15","Type":"ContainerDied","Data":"1510c5a0e22592e06028411eccbbe08cd69f4981baf82da22394ea597b956109"} Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.571750 4651 util.go:48] "No ready sandbox for pod can be found. 
Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.476206 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.486177 4651 generic.go:334] "Generic (PLEG): container finished" podID="5c04eb41-eb14-4687-a0fa-56f07612da15" containerID="1510c5a0e22592e06028411eccbbe08cd69f4981baf82da22394ea597b956109" exitCode=0
Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.486263 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5c04eb41-eb14-4687-a0fa-56f07612da15","Type":"ContainerDied","Data":"1510c5a0e22592e06028411eccbbe08cd69f4981baf82da22394ea597b956109"}
Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.571750 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.656125 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c04eb41-eb14-4687-a0fa-56f07612da15-combined-ca-bundle\") pod \"5c04eb41-eb14-4687-a0fa-56f07612da15\" (UID: \"5c04eb41-eb14-4687-a0fa-56f07612da15\") "
Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.656196 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b4t6z\" (UniqueName: \"kubernetes.io/projected/5c04eb41-eb14-4687-a0fa-56f07612da15-kube-api-access-b4t6z\") pod \"5c04eb41-eb14-4687-a0fa-56f07612da15\" (UID: \"5c04eb41-eb14-4687-a0fa-56f07612da15\") "
Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.656270 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5c04eb41-eb14-4687-a0fa-56f07612da15-httpd-run\") pod \"5c04eb41-eb14-4687-a0fa-56f07612da15\" (UID: \"5c04eb41-eb14-4687-a0fa-56f07612da15\") "
Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.656309 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c04eb41-eb14-4687-a0fa-56f07612da15-config-data\") pod \"5c04eb41-eb14-4687-a0fa-56f07612da15\" (UID: \"5c04eb41-eb14-4687-a0fa-56f07612da15\") "
Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.656340 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c04eb41-eb14-4687-a0fa-56f07612da15-internal-tls-certs\") pod \"5c04eb41-eb14-4687-a0fa-56f07612da15\" (UID: \"5c04eb41-eb14-4687-a0fa-56f07612da15\") "
Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.656387 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c04eb41-eb14-4687-a0fa-56f07612da15-scripts\") pod \"5c04eb41-eb14-4687-a0fa-56f07612da15\" (UID: \"5c04eb41-eb14-4687-a0fa-56f07612da15\") "
Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.656463 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c04eb41-eb14-4687-a0fa-56f07612da15-logs\") pod \"5c04eb41-eb14-4687-a0fa-56f07612da15\" (UID: \"5c04eb41-eb14-4687-a0fa-56f07612da15\") "
Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.657038 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"5c04eb41-eb14-4687-a0fa-56f07612da15\" (UID: \"5c04eb41-eb14-4687-a0fa-56f07612da15\") "
Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.658833 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c04eb41-eb14-4687-a0fa-56f07612da15-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "5c04eb41-eb14-4687-a0fa-56f07612da15" (UID: "5c04eb41-eb14-4687-a0fa-56f07612da15"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.659048 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c04eb41-eb14-4687-a0fa-56f07612da15-logs" (OuterVolumeSpecName: "logs") pod "5c04eb41-eb14-4687-a0fa-56f07612da15" (UID: "5c04eb41-eb14-4687-a0fa-56f07612da15"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.664021 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "glance") pod "5c04eb41-eb14-4687-a0fa-56f07612da15" (UID: "5c04eb41-eb14-4687-a0fa-56f07612da15"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.674233 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c04eb41-eb14-4687-a0fa-56f07612da15-scripts" (OuterVolumeSpecName: "scripts") pod "5c04eb41-eb14-4687-a0fa-56f07612da15" (UID: "5c04eb41-eb14-4687-a0fa-56f07612da15"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.679365 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c04eb41-eb14-4687-a0fa-56f07612da15-kube-api-access-b4t6z" (OuterVolumeSpecName: "kube-api-access-b4t6z") pod "5c04eb41-eb14-4687-a0fa-56f07612da15" (UID: "5c04eb41-eb14-4687-a0fa-56f07612da15"). InnerVolumeSpecName "kube-api-access-b4t6z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.744168 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c04eb41-eb14-4687-a0fa-56f07612da15-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5c04eb41-eb14-4687-a0fa-56f07612da15" (UID: "5c04eb41-eb14-4687-a0fa-56f07612da15"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.758174 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c04eb41-eb14-4687-a0fa-56f07612da15-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "5c04eb41-eb14-4687-a0fa-56f07612da15" (UID: "5c04eb41-eb14-4687-a0fa-56f07612da15"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.759520 4651 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5c04eb41-eb14-4687-a0fa-56f07612da15-logs\") on node \"crc\" DevicePath \"\""
Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.759579 4651 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" "
Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.759593 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c04eb41-eb14-4687-a0fa-56f07612da15-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.759607 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b4t6z\" (UniqueName: \"kubernetes.io/projected/5c04eb41-eb14-4687-a0fa-56f07612da15-kube-api-access-b4t6z\") on node \"crc\" DevicePath \"\""
Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.759621 4651 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5c04eb41-eb14-4687-a0fa-56f07612da15-httpd-run\") on node \"crc\" DevicePath \"\""
Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.759632 4651 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c04eb41-eb14-4687-a0fa-56f07612da15-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.759641 4651 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c04eb41-eb14-4687-a0fa-56f07612da15-scripts\") on node \"crc\" DevicePath \"\""
Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.764174 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c04eb41-eb14-4687-a0fa-56f07612da15-config-data" (OuterVolumeSpecName: "config-data") pod "5c04eb41-eb14-4687-a0fa-56f07612da15" (UID: "5c04eb41-eb14-4687-a0fa-56f07612da15"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.805072 4651 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc"
Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.861065 4651 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\""
Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.861091 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c04eb41-eb14-4687-a0fa-56f07612da15-config-data\") on node \"crc\" DevicePath \"\""
Oct 11 05:09:20 crc kubenswrapper[4651]: I1011 05:09:20.940019 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-p2nff"
Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.022182 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-stlvp"
Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.059885 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-vjf9n"
Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.064251 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zh7xk\" (UniqueName: \"kubernetes.io/projected/1bbe8f4f-c85e-46aa-a214-e28fad1722dc-kube-api-access-zh7xk\") pod \"1bbe8f4f-c85e-46aa-a214-e28fad1722dc\" (UID: \"1bbe8f4f-c85e-46aa-a214-e28fad1722dc\") "
Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.064376 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-srpzt\" (UniqueName: \"kubernetes.io/projected/3cb7ede0-6fea-4867-941e-13a4c5637543-kube-api-access-srpzt\") pod \"3cb7ede0-6fea-4867-941e-13a4c5637543\" (UID: \"3cb7ede0-6fea-4867-941e-13a4c5637543\") "
Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.069976 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb7ede0-6fea-4867-941e-13a4c5637543-kube-api-access-srpzt" (OuterVolumeSpecName: "kube-api-access-srpzt") pod "3cb7ede0-6fea-4867-941e-13a4c5637543" (UID: "3cb7ede0-6fea-4867-941e-13a4c5637543"). InnerVolumeSpecName "kube-api-access-srpzt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.070026 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bbe8f4f-c85e-46aa-a214-e28fad1722dc-kube-api-access-zh7xk" (OuterVolumeSpecName: "kube-api-access-zh7xk") pod "1bbe8f4f-c85e-46aa-a214-e28fad1722dc" (UID: "1bbe8f4f-c85e-46aa-a214-e28fad1722dc"). InnerVolumeSpecName "kube-api-access-zh7xk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.165837 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gm86v\" (UniqueName: \"kubernetes.io/projected/b608bb15-f67e-4192-b820-29752a3cd443-kube-api-access-gm86v\") pod \"b608bb15-f67e-4192-b820-29752a3cd443\" (UID: \"b608bb15-f67e-4192-b820-29752a3cd443\") "
Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.166677 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zh7xk\" (UniqueName: \"kubernetes.io/projected/1bbe8f4f-c85e-46aa-a214-e28fad1722dc-kube-api-access-zh7xk\") on node \"crc\" DevicePath \"\""
Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.166770 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-srpzt\" (UniqueName: \"kubernetes.io/projected/3cb7ede0-6fea-4867-941e-13a4c5637543-kube-api-access-srpzt\") on node \"crc\" DevicePath \"\""
Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.169584 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b608bb15-f67e-4192-b820-29752a3cd443-kube-api-access-gm86v" (OuterVolumeSpecName: "kube-api-access-gm86v") pod "b608bb15-f67e-4192-b820-29752a3cd443" (UID: "b608bb15-f67e-4192-b820-29752a3cd443"). InnerVolumeSpecName "kube-api-access-gm86v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.268028 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gm86v\" (UniqueName: \"kubernetes.io/projected/b608bb15-f67e-4192-b820-29752a3cd443-kube-api-access-gm86v\") on node \"crc\" DevicePath \"\""
Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.502670 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f","Type":"ContainerStarted","Data":"3bc1a80850d26f91304ef49c1b03606e0c21afa8501cfc5b3c07d9530dc8f77f"}
Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.502721 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f","Type":"ContainerStarted","Data":"5d8e3fac23b8802449adb97314896939d9957c4920fc0eed77a18c6057cbb812"}
Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.504667 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-p2nff" event={"ID":"3cb7ede0-6fea-4867-941e-13a4c5637543","Type":"ContainerDied","Data":"17c260016cab529da5c1f8a84035c3d8f47d0277c3ac62d9d57ffd0d569f9e3c"}
Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.504699 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-p2nff"
Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.504702 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="17c260016cab529da5c1f8a84035c3d8f47d0277c3ac62d9d57ffd0d569f9e3c"
Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.507450 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-stlvp" event={"ID":"1bbe8f4f-c85e-46aa-a214-e28fad1722dc","Type":"ContainerDied","Data":"879ace51e67888bdbba03be782d5876c7352a92c3019cb8dac3d0010a9289e02"}
Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.507471 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="879ace51e67888bdbba03be782d5876c7352a92c3019cb8dac3d0010a9289e02"
Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.507482 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-stlvp"
Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.522686 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5c04eb41-eb14-4687-a0fa-56f07612da15","Type":"ContainerDied","Data":"0feb0045d385ab90a5341d4e00169b66f6ebeafe187fa06edb2be9a02a78e5c7"}
Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.522743 4651 scope.go:117] "RemoveContainer" containerID="1510c5a0e22592e06028411eccbbe08cd69f4981baf82da22394ea597b956109"
Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.522741 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.528409 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d85f94e-09fd-4655-a708-0ff3a88e3c35","Type":"ContainerStarted","Data":"c60aba242817d9fae5ed46b210a38f634f12c9c73bfcc27f5338ff60f506db79"}
Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.530201 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8d85f94e-09fd-4655-a708-0ff3a88e3c35" containerName="ceilometer-central-agent" containerID="cri-o://ada35ed2843e8625fb4507f6a29a52188baee8423ccdea3342aba8b95de44a57" gracePeriod=30
Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.530289 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.530541 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8d85f94e-09fd-4655-a708-0ff3a88e3c35" containerName="proxy-httpd" containerID="cri-o://c60aba242817d9fae5ed46b210a38f634f12c9c73bfcc27f5338ff60f506db79" gracePeriod=30
Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.530585 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8d85f94e-09fd-4655-a708-0ff3a88e3c35" containerName="sg-core" containerID="cri-o://5aac2f5b65caacf570d31883365d712a9c8e515d3e45b7af4580dce080daf218" gracePeriod=30
Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.530617 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8d85f94e-09fd-4655-a708-0ff3a88e3c35" containerName="ceilometer-notification-agent" containerID="cri-o://fac62b9075fd04a29b5e17546553586969e4a45b32c113f37b1faeea7d210571" gracePeriod=30
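The four "Killing container with a grace period" entries above mean the kubelet asked the runtime to SIGTERM each ceilometer container, with up to 30 seconds before a forced kill. A sketch for pairing each kill request with the later "container finished" entry for the same container ID (assumed text layout as above, names ours):

import re

KILL_RE = re.compile(r'Killing container with a grace period.*?'
                     r'containerID="cri-o://(?P<id>[0-9a-f]+)".*?gracePeriod=(?P<grace>\d+)')
DONE_RE = re.compile(r'container finished.*?containerID="(?P<id>[0-9a-f]+)" exitCode=(?P<code>-?\d+)')

def pair_kills_with_exits(lines):
    """Return {container id: (grace period, exit code or None if never seen finished)}."""
    kills, exits = {}, {}
    for line in lines:
        if (m := KILL_RE.search(line)):
            kills[m["id"]] = int(m["grace"])
        elif (m := DONE_RE.search(line)):
            exits[m["id"]] = int(m["code"])
    return {cid: (grace, exits.get(cid)) for cid, grace in kills.items()}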
Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.553672 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-vjf9n" event={"ID":"b608bb15-f67e-4192-b820-29752a3cd443","Type":"ContainerDied","Data":"27824a64591ea21fe61e73bf7f244425ee6fe86c2a6e63052d95d61308b077a7"}
Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.553709 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="27824a64591ea21fe61e73bf7f244425ee6fe86c2a6e63052d95d61308b077a7"
Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.553842 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-vjf9n"
Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.636035 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.319497807 podStartE2EDuration="5.636009449s" podCreationTimestamp="2025-10-11 05:09:16 +0000 UTC" firstStartedPulling="2025-10-11 05:09:17.289413554 +0000 UTC m=+1078.185646350" lastFinishedPulling="2025-10-11 05:09:20.605925196 +0000 UTC m=+1081.502157992" observedRunningTime="2025-10-11 05:09:21.59047784 +0000 UTC m=+1082.486710636" watchObservedRunningTime="2025-10-11 05:09:21.636009449 +0000 UTC m=+1082.532242245"
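The startup-latency entry above reports two numbers: podStartE2EDuration is wall time from pod creation to observed running, while podStartSLOduration excludes the image-pull window (lastFinishedPulling - firstStartedPulling). The figures in this entry check out, as the quick recomputation below shows (timestamps truncated to microseconds because strptime's %f takes at most six digits, hence the last-digit drift):

from datetime import datetime

def ts(s):  # e.g. "2025-10-11 05:09:17.289413554" from the entry above
    return datetime.strptime(s[:26], "%Y-%m-%d %H:%M:%S.%f")

pull = ts("2025-10-11 05:09:20.605925196") - ts("2025-10-11 05:09:17.289413554")
e2e = 5.636009449                       # podStartE2EDuration
slo = e2e - pull.total_seconds()
print(round(slo, 6))                    # ~2.319497, matching podStartSLOduration=2.319497807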
podUID="5c04eb41-eb14-4687-a0fa-56f07612da15" containerName="glance-log" Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.694877 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="3cb7ede0-6fea-4867-941e-13a4c5637543" containerName="mariadb-database-create" Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.694885 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="1bbe8f4f-c85e-46aa-a214-e28fad1722dc" containerName="mariadb-database-create" Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.694896 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c04eb41-eb14-4687-a0fa-56f07612da15" containerName="glance-httpd" Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.695787 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.711969 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.725682 4651 scope.go:117] "RemoveContainer" containerID="dc526a3f9f7a83761b341c698b7ac5c3eea9e0a70fbc06131f5b4708b2e6dd8d" Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.733596 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.733810 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.884589 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c04eb41-eb14-4687-a0fa-56f07612da15" path="/var/lib/kubelet/pods/5c04eb41-eb14-4687-a0fa-56f07612da15/volumes" Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.884787 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a991cdf-ad8d-4392-bb4e-792e607d740c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"3a991cdf-ad8d-4392-bb4e-792e607d740c\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.884905 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a991cdf-ad8d-4392-bb4e-792e607d740c-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"3a991cdf-ad8d-4392-bb4e-792e607d740c\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.885008 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3a991cdf-ad8d-4392-bb4e-792e607d740c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"3a991cdf-ad8d-4392-bb4e-792e607d740c\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.885050 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3a991cdf-ad8d-4392-bb4e-792e607d740c-logs\") pod \"glance-default-internal-api-0\" (UID: \"3a991cdf-ad8d-4392-bb4e-792e607d740c\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.885071 4651 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a991cdf-ad8d-4392-bb4e-792e607d740c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"3a991cdf-ad8d-4392-bb4e-792e607d740c\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.885089 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdjvm\" (UniqueName: \"kubernetes.io/projected/3a991cdf-ad8d-4392-bb4e-792e607d740c-kube-api-access-wdjvm\") pod \"glance-default-internal-api-0\" (UID: \"3a991cdf-ad8d-4392-bb4e-792e607d740c\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.885105 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-internal-api-0\" (UID: \"3a991cdf-ad8d-4392-bb4e-792e607d740c\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.885130 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a991cdf-ad8d-4392-bb4e-792e607d740c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"3a991cdf-ad8d-4392-bb4e-792e607d740c\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.986876 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a991cdf-ad8d-4392-bb4e-792e607d740c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"3a991cdf-ad8d-4392-bb4e-792e607d740c\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.987082 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a991cdf-ad8d-4392-bb4e-792e607d740c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"3a991cdf-ad8d-4392-bb4e-792e607d740c\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.987885 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a991cdf-ad8d-4392-bb4e-792e607d740c-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"3a991cdf-ad8d-4392-bb4e-792e607d740c\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.989131 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3a991cdf-ad8d-4392-bb4e-792e607d740c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"3a991cdf-ad8d-4392-bb4e-792e607d740c\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.989503 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3a991cdf-ad8d-4392-bb4e-792e607d740c-logs\") pod \"glance-default-internal-api-0\" (UID: \"3a991cdf-ad8d-4392-bb4e-792e607d740c\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.989639 4651 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a991cdf-ad8d-4392-bb4e-792e607d740c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"3a991cdf-ad8d-4392-bb4e-792e607d740c\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.989740 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wdjvm\" (UniqueName: \"kubernetes.io/projected/3a991cdf-ad8d-4392-bb4e-792e607d740c-kube-api-access-wdjvm\") pod \"glance-default-internal-api-0\" (UID: \"3a991cdf-ad8d-4392-bb4e-792e607d740c\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.990047 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3a991cdf-ad8d-4392-bb4e-792e607d740c-logs\") pod \"glance-default-internal-api-0\" (UID: \"3a991cdf-ad8d-4392-bb4e-792e607d740c\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.992135 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a991cdf-ad8d-4392-bb4e-792e607d740c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"3a991cdf-ad8d-4392-bb4e-792e607d740c\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.994660 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a991cdf-ad8d-4392-bb4e-792e607d740c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"3a991cdf-ad8d-4392-bb4e-792e607d740c\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.995082 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a991cdf-ad8d-4392-bb4e-792e607d740c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"3a991cdf-ad8d-4392-bb4e-792e607d740c\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.995607 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a991cdf-ad8d-4392-bb4e-792e607d740c-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"3a991cdf-ad8d-4392-bb4e-792e607d740c\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.989740 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3a991cdf-ad8d-4392-bb4e-792e607d740c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"3a991cdf-ad8d-4392-bb4e-792e607d740c\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.995884 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-internal-api-0\" (UID: \"3a991cdf-ad8d-4392-bb4e-792e607d740c\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:09:21 crc kubenswrapper[4651]: I1011 05:09:21.996184 4651 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-internal-api-0\" (UID: \"3a991cdf-ad8d-4392-bb4e-792e607d740c\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/glance-default-internal-api-0" Oct 11 05:09:22 crc kubenswrapper[4651]: I1011 05:09:22.010172 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wdjvm\" (UniqueName: \"kubernetes.io/projected/3a991cdf-ad8d-4392-bb4e-792e607d740c-kube-api-access-wdjvm\") pod \"glance-default-internal-api-0\" (UID: \"3a991cdf-ad8d-4392-bb4e-792e607d740c\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:09:22 crc kubenswrapper[4651]: I1011 05:09:22.024146 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-internal-api-0\" (UID: \"3a991cdf-ad8d-4392-bb4e-792e607d740c\") " pod="openstack/glance-default-internal-api-0" Oct 11 05:09:22 crc kubenswrapper[4651]: I1011 05:09:22.060586 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 11 05:09:22 crc kubenswrapper[4651]: I1011 05:09:22.565551 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f","Type":"ContainerStarted","Data":"b3aa21c89ca0416f091ece3e81c4ddf34c721fd541ec890993be745d547689dd"} Oct 11 05:09:22 crc kubenswrapper[4651]: I1011 05:09:22.571380 4651 generic.go:334] "Generic (PLEG): container finished" podID="8d85f94e-09fd-4655-a708-0ff3a88e3c35" containerID="c60aba242817d9fae5ed46b210a38f634f12c9c73bfcc27f5338ff60f506db79" exitCode=0 Oct 11 05:09:22 crc kubenswrapper[4651]: I1011 05:09:22.571407 4651 generic.go:334] "Generic (PLEG): container finished" podID="8d85f94e-09fd-4655-a708-0ff3a88e3c35" containerID="5aac2f5b65caacf570d31883365d712a9c8e515d3e45b7af4580dce080daf218" exitCode=2 Oct 11 05:09:22 crc kubenswrapper[4651]: I1011 05:09:22.571415 4651 generic.go:334] "Generic (PLEG): container finished" podID="8d85f94e-09fd-4655-a708-0ff3a88e3c35" containerID="fac62b9075fd04a29b5e17546553586969e4a45b32c113f37b1faeea7d210571" exitCode=0 Oct 11 05:09:22 crc kubenswrapper[4651]: I1011 05:09:22.571437 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d85f94e-09fd-4655-a708-0ff3a88e3c35","Type":"ContainerDied","Data":"c60aba242817d9fae5ed46b210a38f634f12c9c73bfcc27f5338ff60f506db79"} Oct 11 05:09:22 crc kubenswrapper[4651]: I1011 05:09:22.571460 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d85f94e-09fd-4655-a708-0ff3a88e3c35","Type":"ContainerDied","Data":"5aac2f5b65caacf570d31883365d712a9c8e515d3e45b7af4580dce080daf218"} Oct 11 05:09:22 crc kubenswrapper[4651]: I1011 05:09:22.571471 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d85f94e-09fd-4655-a708-0ff3a88e3c35","Type":"ContainerDied","Data":"fac62b9075fd04a29b5e17546553586969e4a45b32c113f37b1faeea7d210571"} Oct 11 05:09:22 crc kubenswrapper[4651]: I1011 05:09:22.594885 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.594867057 podStartE2EDuration="3.594867057s" podCreationTimestamp="2025-10-11 05:09:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
Oct 11 05:09:22 crc kubenswrapper[4651]: I1011 05:09:22.594885 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.594867057 podStartE2EDuration="3.594867057s" podCreationTimestamp="2025-10-11 05:09:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:09:22.592845046 +0000 UTC m=+1083.489077852" watchObservedRunningTime="2025-10-11 05:09:22.594867057 +0000 UTC m=+1083.491099853"
Oct 11 05:09:22 crc kubenswrapper[4651]: I1011 05:09:22.619443 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 11 05:09:22 crc kubenswrapper[4651]: W1011 05:09:22.624382 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3a991cdf_ad8d_4392_bb4e_792e607d740c.slice/crio-01e454cad9b9cd99b93a294c47335e4023f49764a242e2434989ece1d64a44f8 WatchSource:0}: Error finding container 01e454cad9b9cd99b93a294c47335e4023f49764a242e2434989ece1d64a44f8: Status 404 returned error can't find the container with id 01e454cad9b9cd99b93a294c47335e4023f49764a242e2434989ece1d64a44f8
Oct 11 05:09:23 crc kubenswrapper[4651]: I1011 05:09:23.587472 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3a991cdf-ad8d-4392-bb4e-792e607d740c","Type":"ContainerStarted","Data":"49b4eb230a1f688f237e9393f9c11cf025dfbc583e57fef6529fbbbc2b5b1edb"}
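The W1011 line above is the only warning in this stretch: the cgroup watcher noticed a freshly created crio container directory before the runtime finished registering it, a transient 404 race. Every kubenswrapper message carries the same klog header (severity letter I/W/E/F, MMDD, time, PID, file:line), so lines can be split mechanically; a sketch assuming that layout:

import re

# klog header: <severity>MMDD HH:MM:SS.micros <pid> <file>:<line>] <message>
KLOG_RE = re.compile(
    r'(?P<sev>[IWEF])(?P<mmdd>\d{4}) (?P<time>\d{2}:\d{2}:\d{2}\.\d+)\s+'
    r'(?P<pid>\d+) (?P<src>[\w./]+:\d+)\] (?P<msg>.*)'
)

def klog_fields(line):
    """Return the parsed klog header fields of a kubelet log line, or None."""
    m = KLOG_RE.search(line)
    return m.groupdict() if m else None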
Oct 11 05:09:23 crc kubenswrapper[4651]: I1011 05:09:23.587779 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3a991cdf-ad8d-4392-bb4e-792e607d740c","Type":"ContainerStarted","Data":"01e454cad9b9cd99b93a294c47335e4023f49764a242e2434989ece1d64a44f8"}
Oct 11 05:09:24 crc kubenswrapper[4651]: I1011 05:09:24.598276 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3a991cdf-ad8d-4392-bb4e-792e607d740c","Type":"ContainerStarted","Data":"2b7b2bb21c90290e30ab1c0b312e9cb185f1bcec1b0f02755ea18e48485ea9f8"}
Oct 11 05:09:24 crc kubenswrapper[4651]: I1011 05:09:24.618912 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.618887511 podStartE2EDuration="3.618887511s" podCreationTimestamp="2025-10-11 05:09:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:09:24.61645955 +0000 UTC m=+1085.512692356" watchObservedRunningTime="2025-10-11 05:09:24.618887511 +0000 UTC m=+1085.515120317"
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.167305 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.254882 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d85f94e-09fd-4655-a708-0ff3a88e3c35-combined-ca-bundle\") pod \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\" (UID: \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\") "
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.254981 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d85f94e-09fd-4655-a708-0ff3a88e3c35-log-httpd\") pod \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\" (UID: \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\") "
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.255615 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d85f94e-09fd-4655-a708-0ff3a88e3c35-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "8d85f94e-09fd-4655-a708-0ff3a88e3c35" (UID: "8d85f94e-09fd-4655-a708-0ff3a88e3c35"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.255686 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d85f94e-09fd-4655-a708-0ff3a88e3c35-scripts\") pod \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\" (UID: \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\") "
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.256230 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d85f94e-09fd-4655-a708-0ff3a88e3c35-config-data\") pod \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\" (UID: \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\") "
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.256955 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8d85f94e-09fd-4655-a708-0ff3a88e3c35-sg-core-conf-yaml\") pod \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\" (UID: \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\") "
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.257055 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d85f94e-09fd-4655-a708-0ff3a88e3c35-ceilometer-tls-certs\") pod \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\" (UID: \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\") "
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.257272 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d85f94e-09fd-4655-a708-0ff3a88e3c35-run-httpd\") pod \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\" (UID: \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\") "
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.257375 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c28t4\" (UniqueName: \"kubernetes.io/projected/8d85f94e-09fd-4655-a708-0ff3a88e3c35-kube-api-access-c28t4\") pod \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\" (UID: \"8d85f94e-09fd-4655-a708-0ff3a88e3c35\") "
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.257918 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d85f94e-09fd-4655-a708-0ff3a88e3c35-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "8d85f94e-09fd-4655-a708-0ff3a88e3c35" (UID: "8d85f94e-09fd-4655-a708-0ff3a88e3c35"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.258749 4651 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d85f94e-09fd-4655-a708-0ff3a88e3c35-run-httpd\") on node \"crc\" DevicePath \"\""
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.258779 4651 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d85f94e-09fd-4655-a708-0ff3a88e3c35-log-httpd\") on node \"crc\" DevicePath \"\""
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.281346 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d85f94e-09fd-4655-a708-0ff3a88e3c35-kube-api-access-c28t4" (OuterVolumeSpecName: "kube-api-access-c28t4") pod "8d85f94e-09fd-4655-a708-0ff3a88e3c35" (UID: "8d85f94e-09fd-4655-a708-0ff3a88e3c35"). InnerVolumeSpecName "kube-api-access-c28t4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.287054 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d85f94e-09fd-4655-a708-0ff3a88e3c35-scripts" (OuterVolumeSpecName: "scripts") pod "8d85f94e-09fd-4655-a708-0ff3a88e3c35" (UID: "8d85f94e-09fd-4655-a708-0ff3a88e3c35"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.287515 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d85f94e-09fd-4655-a708-0ff3a88e3c35-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "8d85f94e-09fd-4655-a708-0ff3a88e3c35" (UID: "8d85f94e-09fd-4655-a708-0ff3a88e3c35"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.318047 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d85f94e-09fd-4655-a708-0ff3a88e3c35-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "8d85f94e-09fd-4655-a708-0ff3a88e3c35" (UID: "8d85f94e-09fd-4655-a708-0ff3a88e3c35"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.334882 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d85f94e-09fd-4655-a708-0ff3a88e3c35-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8d85f94e-09fd-4655-a708-0ff3a88e3c35" (UID: "8d85f94e-09fd-4655-a708-0ff3a88e3c35"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.360512 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d85f94e-09fd-4655-a708-0ff3a88e3c35-config-data" (OuterVolumeSpecName: "config-data") pod "8d85f94e-09fd-4655-a708-0ff3a88e3c35" (UID: "8d85f94e-09fd-4655-a708-0ff3a88e3c35"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.361469 4651 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8d85f94e-09fd-4655-a708-0ff3a88e3c35-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.364247 4651 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d85f94e-09fd-4655-a708-0ff3a88e3c35-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.364329 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c28t4\" (UniqueName: \"kubernetes.io/projected/8d85f94e-09fd-4655-a708-0ff3a88e3c35-kube-api-access-c28t4\") on node \"crc\" DevicePath \"\""
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.364395 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d85f94e-09fd-4655-a708-0ff3a88e3c35-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.364452 4651 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d85f94e-09fd-4655-a708-0ff3a88e3c35-scripts\") on node \"crc\" DevicePath \"\""
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.465567 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d85f94e-09fd-4655-a708-0ff3a88e3c35-config-data\") on node \"crc\" DevicePath \"\""
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.607990 4651 generic.go:334] "Generic (PLEG): container finished" podID="8d85f94e-09fd-4655-a708-0ff3a88e3c35" containerID="ada35ed2843e8625fb4507f6a29a52188baee8423ccdea3342aba8b95de44a57" exitCode=0
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.608044 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d85f94e-09fd-4655-a708-0ff3a88e3c35","Type":"ContainerDied","Data":"ada35ed2843e8625fb4507f6a29a52188baee8423ccdea3342aba8b95de44a57"}
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.608105 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.609000 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d85f94e-09fd-4655-a708-0ff3a88e3c35","Type":"ContainerDied","Data":"96e68981b57f75cf77d1b090be3b1601d09636e62ed183aef6ee79e0da208a9d"}
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.609035 4651 scope.go:117] "RemoveContainer" containerID="c60aba242817d9fae5ed46b210a38f634f12c9c73bfcc27f5338ff60f506db79"
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.629058 4651 scope.go:117] "RemoveContainer" containerID="5aac2f5b65caacf570d31883365d712a9c8e515d3e45b7af4580dce080daf218"
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.649883 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.652946 4651 scope.go:117] "RemoveContainer" containerID="fac62b9075fd04a29b5e17546553586969e4a45b32c113f37b1faeea7d210571"
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.659094 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.676289 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Oct 11 05:09:25 crc kubenswrapper[4651]: E1011 05:09:25.676733 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d85f94e-09fd-4655-a708-0ff3a88e3c35" containerName="ceilometer-notification-agent"
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.676760 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d85f94e-09fd-4655-a708-0ff3a88e3c35" containerName="ceilometer-notification-agent"
Oct 11 05:09:25 crc kubenswrapper[4651]: E1011 05:09:25.676794 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d85f94e-09fd-4655-a708-0ff3a88e3c35" containerName="proxy-httpd"
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.676803 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d85f94e-09fd-4655-a708-0ff3a88e3c35" containerName="proxy-httpd"
Oct 11 05:09:25 crc kubenswrapper[4651]: E1011 05:09:25.676811 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d85f94e-09fd-4655-a708-0ff3a88e3c35" containerName="ceilometer-central-agent"
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.676836 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d85f94e-09fd-4655-a708-0ff3a88e3c35" containerName="ceilometer-central-agent"
Oct 11 05:09:25 crc kubenswrapper[4651]: E1011 05:09:25.676848 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d85f94e-09fd-4655-a708-0ff3a88e3c35" containerName="sg-core"
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.676856 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d85f94e-09fd-4655-a708-0ff3a88e3c35" containerName="sg-core"
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.677079 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d85f94e-09fd-4655-a708-0ff3a88e3c35" containerName="sg-core"
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.677103 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d85f94e-09fd-4655-a708-0ff3a88e3c35" containerName="ceilometer-notification-agent"
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.677131 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d85f94e-09fd-4655-a708-0ff3a88e3c35" containerName="ceilometer-central-agent"
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.677143 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d85f94e-09fd-4655-a708-0ff3a88e3c35" containerName="proxy-httpd"
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.678093 4651 scope.go:117] "RemoveContainer" containerID="ada35ed2843e8625fb4507f6a29a52188baee8423ccdea3342aba8b95de44a57"
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.679075 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.683082 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc"
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.683263 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.683389 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.688357 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.705219 4651 scope.go:117] "RemoveContainer" containerID="c60aba242817d9fae5ed46b210a38f634f12c9c73bfcc27f5338ff60f506db79"
Oct 11 05:09:25 crc kubenswrapper[4651]: E1011 05:09:25.706442 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c60aba242817d9fae5ed46b210a38f634f12c9c73bfcc27f5338ff60f506db79\": container with ID starting with c60aba242817d9fae5ed46b210a38f634f12c9c73bfcc27f5338ff60f506db79 not found: ID does not exist" containerID="c60aba242817d9fae5ed46b210a38f634f12c9c73bfcc27f5338ff60f506db79"
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.706497 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c60aba242817d9fae5ed46b210a38f634f12c9c73bfcc27f5338ff60f506db79"} err="failed to get container status \"c60aba242817d9fae5ed46b210a38f634f12c9c73bfcc27f5338ff60f506db79\": rpc error: code = NotFound desc = could not find container \"c60aba242817d9fae5ed46b210a38f634f12c9c73bfcc27f5338ff60f506db79\": container with ID starting with c60aba242817d9fae5ed46b210a38f634f12c9c73bfcc27f5338ff60f506db79 not found: ID does not exist"
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.706542 4651 scope.go:117] "RemoveContainer" containerID="5aac2f5b65caacf570d31883365d712a9c8e515d3e45b7af4580dce080daf218"
Oct 11 05:09:25 crc kubenswrapper[4651]: E1011 05:09:25.707069 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5aac2f5b65caacf570d31883365d712a9c8e515d3e45b7af4580dce080daf218\": container with ID starting with 5aac2f5b65caacf570d31883365d712a9c8e515d3e45b7af4580dce080daf218 not found: ID does not exist" containerID="5aac2f5b65caacf570d31883365d712a9c8e515d3e45b7af4580dce080daf218"
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.707102 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5aac2f5b65caacf570d31883365d712a9c8e515d3e45b7af4580dce080daf218"} err="failed to get container status \"5aac2f5b65caacf570d31883365d712a9c8e515d3e45b7af4580dce080daf218\": rpc error: code = NotFound desc = could not find container \"5aac2f5b65caacf570d31883365d712a9c8e515d3e45b7af4580dce080daf218\": container with ID starting with 5aac2f5b65caacf570d31883365d712a9c8e515d3e45b7af4580dce080daf218 not found: ID does not exist"
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.707121 4651 scope.go:117] "RemoveContainer" containerID="fac62b9075fd04a29b5e17546553586969e4a45b32c113f37b1faeea7d210571"
Oct 11 05:09:25 crc kubenswrapper[4651]: E1011 05:09:25.710952 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fac62b9075fd04a29b5e17546553586969e4a45b32c113f37b1faeea7d210571\": container with ID starting with fac62b9075fd04a29b5e17546553586969e4a45b32c113f37b1faeea7d210571 not found: ID does not exist" containerID="fac62b9075fd04a29b5e17546553586969e4a45b32c113f37b1faeea7d210571"
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.710992 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fac62b9075fd04a29b5e17546553586969e4a45b32c113f37b1faeea7d210571"} err="failed to get container status \"fac62b9075fd04a29b5e17546553586969e4a45b32c113f37b1faeea7d210571\": rpc error: code = NotFound desc = could not find container \"fac62b9075fd04a29b5e17546553586969e4a45b32c113f37b1faeea7d210571\": container with ID starting with fac62b9075fd04a29b5e17546553586969e4a45b32c113f37b1faeea7d210571 not found: ID does not exist"
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.711016 4651 scope.go:117] "RemoveContainer" containerID="ada35ed2843e8625fb4507f6a29a52188baee8423ccdea3342aba8b95de44a57"
Oct 11 05:09:25 crc kubenswrapper[4651]: E1011 05:09:25.711362 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ada35ed2843e8625fb4507f6a29a52188baee8423ccdea3342aba8b95de44a57\": container with ID starting with ada35ed2843e8625fb4507f6a29a52188baee8423ccdea3342aba8b95de44a57 not found: ID does not exist" containerID="ada35ed2843e8625fb4507f6a29a52188baee8423ccdea3342aba8b95de44a57"
Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.711390 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ada35ed2843e8625fb4507f6a29a52188baee8423ccdea3342aba8b95de44a57"} err="failed to get container status \"ada35ed2843e8625fb4507f6a29a52188baee8423ccdea3342aba8b95de44a57\": rpc error: code = NotFound desc = could not find container \"ada35ed2843e8625fb4507f6a29a52188baee8423ccdea3342aba8b95de44a57\": container with ID starting with ada35ed2843e8625fb4507f6a29a52188baee8423ccdea3342aba8b95de44a57 not found: ID does not exist"
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwcq5\" (UniqueName: \"kubernetes.io/projected/4eef40bb-34c1-42bd-acef-5a58f3737fa1-kube-api-access-zwcq5\") pod \"ceilometer-0\" (UID: \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\") " pod="openstack/ceilometer-0" Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.775689 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4eef40bb-34c1-42bd-acef-5a58f3737fa1-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\") " pod="openstack/ceilometer-0" Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.775750 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4eef40bb-34c1-42bd-acef-5a58f3737fa1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\") " pod="openstack/ceilometer-0" Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.775776 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4eef40bb-34c1-42bd-acef-5a58f3737fa1-scripts\") pod \"ceilometer-0\" (UID: \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\") " pod="openstack/ceilometer-0" Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.775797 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4eef40bb-34c1-42bd-acef-5a58f3737fa1-config-data\") pod \"ceilometer-0\" (UID: \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\") " pod="openstack/ceilometer-0" Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.775917 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4eef40bb-34c1-42bd-acef-5a58f3737fa1-run-httpd\") pod \"ceilometer-0\" (UID: \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\") " pod="openstack/ceilometer-0" Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.876910 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4eef40bb-34c1-42bd-acef-5a58f3737fa1-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\") " pod="openstack/ceilometer-0" Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.877245 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4eef40bb-34c1-42bd-acef-5a58f3737fa1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\") " pod="openstack/ceilometer-0" Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.877268 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4eef40bb-34c1-42bd-acef-5a58f3737fa1-scripts\") pod \"ceilometer-0\" (UID: \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\") " pod="openstack/ceilometer-0" Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.877285 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4eef40bb-34c1-42bd-acef-5a58f3737fa1-config-data\") pod \"ceilometer-0\" (UID: \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\") " 
pod="openstack/ceilometer-0" Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.877319 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4eef40bb-34c1-42bd-acef-5a58f3737fa1-run-httpd\") pod \"ceilometer-0\" (UID: \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\") " pod="openstack/ceilometer-0" Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.877364 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4eef40bb-34c1-42bd-acef-5a58f3737fa1-log-httpd\") pod \"ceilometer-0\" (UID: \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\") " pod="openstack/ceilometer-0" Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.877401 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4eef40bb-34c1-42bd-acef-5a58f3737fa1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\") " pod="openstack/ceilometer-0" Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.877423 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwcq5\" (UniqueName: \"kubernetes.io/projected/4eef40bb-34c1-42bd-acef-5a58f3737fa1-kube-api-access-zwcq5\") pod \"ceilometer-0\" (UID: \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\") " pod="openstack/ceilometer-0" Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.881706 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4eef40bb-34c1-42bd-acef-5a58f3737fa1-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\") " pod="openstack/ceilometer-0" Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.882186 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4eef40bb-34c1-42bd-acef-5a58f3737fa1-log-httpd\") pod \"ceilometer-0\" (UID: \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\") " pod="openstack/ceilometer-0" Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.882395 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4eef40bb-34c1-42bd-acef-5a58f3737fa1-run-httpd\") pod \"ceilometer-0\" (UID: \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\") " pod="openstack/ceilometer-0" Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.883067 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d85f94e-09fd-4655-a708-0ff3a88e3c35" path="/var/lib/kubelet/pods/8d85f94e-09fd-4655-a708-0ff3a88e3c35/volumes" Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.885165 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4eef40bb-34c1-42bd-acef-5a58f3737fa1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\") " pod="openstack/ceilometer-0" Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.885677 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4eef40bb-34c1-42bd-acef-5a58f3737fa1-config-data\") pod \"ceilometer-0\" (UID: \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\") " pod="openstack/ceilometer-0" Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.885945 4651 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4eef40bb-34c1-42bd-acef-5a58f3737fa1-scripts\") pod \"ceilometer-0\" (UID: \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\") " pod="openstack/ceilometer-0" Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.886183 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4eef40bb-34c1-42bd-acef-5a58f3737fa1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\") " pod="openstack/ceilometer-0" Oct 11 05:09:25 crc kubenswrapper[4651]: I1011 05:09:25.897068 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwcq5\" (UniqueName: \"kubernetes.io/projected/4eef40bb-34c1-42bd-acef-5a58f3737fa1-kube-api-access-zwcq5\") pod \"ceilometer-0\" (UID: \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\") " pod="openstack/ceilometer-0" Oct 11 05:09:26 crc kubenswrapper[4651]: I1011 05:09:26.000512 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 11 05:09:26 crc kubenswrapper[4651]: I1011 05:09:26.458538 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 11 05:09:26 crc kubenswrapper[4651]: W1011 05:09:26.465562 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4eef40bb_34c1_42bd_acef_5a58f3737fa1.slice/crio-622ce5d5f78351b7b6d09150e5342e1603473a0700d493fddfceeadc311304e3 WatchSource:0}: Error finding container 622ce5d5f78351b7b6d09150e5342e1603473a0700d493fddfceeadc311304e3: Status 404 returned error can't find the container with id 622ce5d5f78351b7b6d09150e5342e1603473a0700d493fddfceeadc311304e3 Oct 11 05:09:26 crc kubenswrapper[4651]: I1011 05:09:26.618156 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4eef40bb-34c1-42bd-acef-5a58f3737fa1","Type":"ContainerStarted","Data":"622ce5d5f78351b7b6d09150e5342e1603473a0700d493fddfceeadc311304e3"} Oct 11 05:09:27 crc kubenswrapper[4651]: I1011 05:09:27.323554 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-6859-account-create-xzc42"] Oct 11 05:09:27 crc kubenswrapper[4651]: I1011 05:09:27.325033 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-6859-account-create-xzc42" Oct 11 05:09:27 crc kubenswrapper[4651]: I1011 05:09:27.333617 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-6859-account-create-xzc42"] Oct 11 05:09:27 crc kubenswrapper[4651]: I1011 05:09:27.333770 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Oct 11 05:09:27 crc kubenswrapper[4651]: I1011 05:09:27.407950 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tw2m5\" (UniqueName: \"kubernetes.io/projected/dd2c676b-5ffe-4117-bd68-481ab14bcfb6-kube-api-access-tw2m5\") pod \"nova-api-6859-account-create-xzc42\" (UID: \"dd2c676b-5ffe-4117-bd68-481ab14bcfb6\") " pod="openstack/nova-api-6859-account-create-xzc42" Oct 11 05:09:27 crc kubenswrapper[4651]: I1011 05:09:27.512748 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tw2m5\" (UniqueName: \"kubernetes.io/projected/dd2c676b-5ffe-4117-bd68-481ab14bcfb6-kube-api-access-tw2m5\") pod \"nova-api-6859-account-create-xzc42\" (UID: \"dd2c676b-5ffe-4117-bd68-481ab14bcfb6\") " pod="openstack/nova-api-6859-account-create-xzc42" Oct 11 05:09:27 crc kubenswrapper[4651]: I1011 05:09:27.535248 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tw2m5\" (UniqueName: \"kubernetes.io/projected/dd2c676b-5ffe-4117-bd68-481ab14bcfb6-kube-api-access-tw2m5\") pod \"nova-api-6859-account-create-xzc42\" (UID: \"dd2c676b-5ffe-4117-bd68-481ab14bcfb6\") " pod="openstack/nova-api-6859-account-create-xzc42" Oct 11 05:09:27 crc kubenswrapper[4651]: I1011 05:09:27.553529 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-b783-account-create-z7jwg"] Oct 11 05:09:27 crc kubenswrapper[4651]: I1011 05:09:27.555934 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-b783-account-create-z7jwg" Oct 11 05:09:27 crc kubenswrapper[4651]: I1011 05:09:27.558705 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Oct 11 05:09:27 crc kubenswrapper[4651]: I1011 05:09:27.562594 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-b783-account-create-z7jwg"] Oct 11 05:09:27 crc kubenswrapper[4651]: I1011 05:09:27.630024 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4eef40bb-34c1-42bd-acef-5a58f3737fa1","Type":"ContainerStarted","Data":"dcc11aa8f58e46b93834965562036ce361ca05d52c4144d1b7a8c0b091723b2a"} Oct 11 05:09:27 crc kubenswrapper[4651]: I1011 05:09:27.650910 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-6859-account-create-xzc42" Oct 11 05:09:27 crc kubenswrapper[4651]: I1011 05:09:27.715579 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vsh6m\" (UniqueName: \"kubernetes.io/projected/7edf14d6-b944-4477-8803-9536566e63ba-kube-api-access-vsh6m\") pod \"nova-cell0-b783-account-create-z7jwg\" (UID: \"7edf14d6-b944-4477-8803-9536566e63ba\") " pod="openstack/nova-cell0-b783-account-create-z7jwg" Oct 11 05:09:27 crc kubenswrapper[4651]: I1011 05:09:27.725933 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-5fc8-account-create-jm72j"] Oct 11 05:09:27 crc kubenswrapper[4651]: I1011 05:09:27.727211 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-5fc8-account-create-jm72j" Oct 11 05:09:27 crc kubenswrapper[4651]: I1011 05:09:27.729013 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Oct 11 05:09:27 crc kubenswrapper[4651]: I1011 05:09:27.778156 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-5fc8-account-create-jm72j"] Oct 11 05:09:27 crc kubenswrapper[4651]: I1011 05:09:27.817047 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vsh6m\" (UniqueName: \"kubernetes.io/projected/7edf14d6-b944-4477-8803-9536566e63ba-kube-api-access-vsh6m\") pod \"nova-cell0-b783-account-create-z7jwg\" (UID: \"7edf14d6-b944-4477-8803-9536566e63ba\") " pod="openstack/nova-cell0-b783-account-create-z7jwg" Oct 11 05:09:27 crc kubenswrapper[4651]: I1011 05:09:27.817213 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmvm9\" (UniqueName: \"kubernetes.io/projected/949f137e-f9eb-43e5-b1bc-cea47b356ee6-kube-api-access-dmvm9\") pod \"nova-cell1-5fc8-account-create-jm72j\" (UID: \"949f137e-f9eb-43e5-b1bc-cea47b356ee6\") " pod="openstack/nova-cell1-5fc8-account-create-jm72j" Oct 11 05:09:27 crc kubenswrapper[4651]: I1011 05:09:27.836049 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vsh6m\" (UniqueName: \"kubernetes.io/projected/7edf14d6-b944-4477-8803-9536566e63ba-kube-api-access-vsh6m\") pod \"nova-cell0-b783-account-create-z7jwg\" (UID: \"7edf14d6-b944-4477-8803-9536566e63ba\") " pod="openstack/nova-cell0-b783-account-create-z7jwg" Oct 11 05:09:27 crc kubenswrapper[4651]: I1011 05:09:27.879963 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-b783-account-create-z7jwg" Oct 11 05:09:27 crc kubenswrapper[4651]: I1011 05:09:27.922219 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmvm9\" (UniqueName: \"kubernetes.io/projected/949f137e-f9eb-43e5-b1bc-cea47b356ee6-kube-api-access-dmvm9\") pod \"nova-cell1-5fc8-account-create-jm72j\" (UID: \"949f137e-f9eb-43e5-b1bc-cea47b356ee6\") " pod="openstack/nova-cell1-5fc8-account-create-jm72j" Oct 11 05:09:27 crc kubenswrapper[4651]: I1011 05:09:27.941804 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmvm9\" (UniqueName: \"kubernetes.io/projected/949f137e-f9eb-43e5-b1bc-cea47b356ee6-kube-api-access-dmvm9\") pod \"nova-cell1-5fc8-account-create-jm72j\" (UID: \"949f137e-f9eb-43e5-b1bc-cea47b356ee6\") " pod="openstack/nova-cell1-5fc8-account-create-jm72j" Oct 11 05:09:28 crc kubenswrapper[4651]: I1011 05:09:28.089841 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-5fc8-account-create-jm72j" Oct 11 05:09:28 crc kubenswrapper[4651]: I1011 05:09:28.156161 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-6859-account-create-xzc42"] Oct 11 05:09:28 crc kubenswrapper[4651]: W1011 05:09:28.163084 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddd2c676b_5ffe_4117_bd68_481ab14bcfb6.slice/crio-20dfb2a35896ebf9b7c50c2242b6d1aeb6722d3a1a77d9c5f8f703d054c8ff68 WatchSource:0}: Error finding container 20dfb2a35896ebf9b7c50c2242b6d1aeb6722d3a1a77d9c5f8f703d054c8ff68: Status 404 returned error can't find the container with id 20dfb2a35896ebf9b7c50c2242b6d1aeb6722d3a1a77d9c5f8f703d054c8ff68 Oct 11 05:09:28 crc kubenswrapper[4651]: I1011 05:09:28.297732 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-b783-account-create-z7jwg"] Oct 11 05:09:28 crc kubenswrapper[4651]: I1011 05:09:28.526804 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-5fc8-account-create-jm72j"] Oct 11 05:09:28 crc kubenswrapper[4651]: I1011 05:09:28.641914 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4eef40bb-34c1-42bd-acef-5a58f3737fa1","Type":"ContainerStarted","Data":"1d54f4fded1b6a9a87f4fffd04eae084cda01bfbe6537be6617a4cc9d36534f2"} Oct 11 05:09:28 crc kubenswrapper[4651]: I1011 05:09:28.641956 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4eef40bb-34c1-42bd-acef-5a58f3737fa1","Type":"ContainerStarted","Data":"4e6fd4ac980d59f619b0dc177c204d462d629eb3f0c8268921b4353231bb9ec1"} Oct 11 05:09:28 crc kubenswrapper[4651]: I1011 05:09:28.644346 4651 generic.go:334] "Generic (PLEG): container finished" podID="7edf14d6-b944-4477-8803-9536566e63ba" containerID="43763f5ef3d329b5e4c0b696844afe6d616f7ce3113c771c99213e83481c19c2" exitCode=0 Oct 11 05:09:28 crc kubenswrapper[4651]: I1011 05:09:28.644467 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-b783-account-create-z7jwg" event={"ID":"7edf14d6-b944-4477-8803-9536566e63ba","Type":"ContainerDied","Data":"43763f5ef3d329b5e4c0b696844afe6d616f7ce3113c771c99213e83481c19c2"} Oct 11 05:09:28 crc kubenswrapper[4651]: I1011 05:09:28.644544 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-b783-account-create-z7jwg" 
event={"ID":"7edf14d6-b944-4477-8803-9536566e63ba","Type":"ContainerStarted","Data":"4a928e037f4b774c9c1d116a952d30d6faf50b269c1f1cf3d2307e457cb4c17d"} Oct 11 05:09:28 crc kubenswrapper[4651]: I1011 05:09:28.647201 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-5fc8-account-create-jm72j" event={"ID":"949f137e-f9eb-43e5-b1bc-cea47b356ee6","Type":"ContainerStarted","Data":"fbfd30063fab0eb6a4933ffc3e9e3d81e0c5b28734ce8e6a558a17f901ef8462"} Oct 11 05:09:28 crc kubenswrapper[4651]: I1011 05:09:28.649205 4651 generic.go:334] "Generic (PLEG): container finished" podID="dd2c676b-5ffe-4117-bd68-481ab14bcfb6" containerID="edee163ecd42935dd7592d5e2c2dcd46840fd077fc95d7b36b0184ae68e6eae3" exitCode=0 Oct 11 05:09:28 crc kubenswrapper[4651]: I1011 05:09:28.649236 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-6859-account-create-xzc42" event={"ID":"dd2c676b-5ffe-4117-bd68-481ab14bcfb6","Type":"ContainerDied","Data":"edee163ecd42935dd7592d5e2c2dcd46840fd077fc95d7b36b0184ae68e6eae3"} Oct 11 05:09:28 crc kubenswrapper[4651]: I1011 05:09:28.649250 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-6859-account-create-xzc42" event={"ID":"dd2c676b-5ffe-4117-bd68-481ab14bcfb6","Type":"ContainerStarted","Data":"20dfb2a35896ebf9b7c50c2242b6d1aeb6722d3a1a77d9c5f8f703d054c8ff68"} Oct 11 05:09:29 crc kubenswrapper[4651]: I1011 05:09:29.658275 4651 generic.go:334] "Generic (PLEG): container finished" podID="949f137e-f9eb-43e5-b1bc-cea47b356ee6" containerID="33a245491c814be790ad16a8c1d5a6a92c943dbd5caeca8dbd7d3ebf0b9bd67a" exitCode=0 Oct 11 05:09:29 crc kubenswrapper[4651]: I1011 05:09:29.658411 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-5fc8-account-create-jm72j" event={"ID":"949f137e-f9eb-43e5-b1bc-cea47b356ee6","Type":"ContainerDied","Data":"33a245491c814be790ad16a8c1d5a6a92c943dbd5caeca8dbd7d3ebf0b9bd67a"} Oct 11 05:09:29 crc kubenswrapper[4651]: I1011 05:09:29.902308 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 11 05:09:29 crc kubenswrapper[4651]: I1011 05:09:29.902368 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 11 05:09:30 crc kubenswrapper[4651]: I1011 05:09:30.039447 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 11 05:09:30 crc kubenswrapper[4651]: I1011 05:09:30.044351 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 11 05:09:30 crc kubenswrapper[4651]: I1011 05:09:30.115016 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-6859-account-create-xzc42" Oct 11 05:09:30 crc kubenswrapper[4651]: I1011 05:09:30.124610 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-b783-account-create-z7jwg" Oct 11 05:09:30 crc kubenswrapper[4651]: I1011 05:09:30.273343 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tw2m5\" (UniqueName: \"kubernetes.io/projected/dd2c676b-5ffe-4117-bd68-481ab14bcfb6-kube-api-access-tw2m5\") pod \"dd2c676b-5ffe-4117-bd68-481ab14bcfb6\" (UID: \"dd2c676b-5ffe-4117-bd68-481ab14bcfb6\") " Oct 11 05:09:30 crc kubenswrapper[4651]: I1011 05:09:30.273395 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vsh6m\" (UniqueName: \"kubernetes.io/projected/7edf14d6-b944-4477-8803-9536566e63ba-kube-api-access-vsh6m\") pod \"7edf14d6-b944-4477-8803-9536566e63ba\" (UID: \"7edf14d6-b944-4477-8803-9536566e63ba\") " Oct 11 05:09:30 crc kubenswrapper[4651]: I1011 05:09:30.278201 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd2c676b-5ffe-4117-bd68-481ab14bcfb6-kube-api-access-tw2m5" (OuterVolumeSpecName: "kube-api-access-tw2m5") pod "dd2c676b-5ffe-4117-bd68-481ab14bcfb6" (UID: "dd2c676b-5ffe-4117-bd68-481ab14bcfb6"). InnerVolumeSpecName "kube-api-access-tw2m5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:09:30 crc kubenswrapper[4651]: I1011 05:09:30.283000 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7edf14d6-b944-4477-8803-9536566e63ba-kube-api-access-vsh6m" (OuterVolumeSpecName: "kube-api-access-vsh6m") pod "7edf14d6-b944-4477-8803-9536566e63ba" (UID: "7edf14d6-b944-4477-8803-9536566e63ba"). InnerVolumeSpecName "kube-api-access-vsh6m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:09:30 crc kubenswrapper[4651]: I1011 05:09:30.375206 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tw2m5\" (UniqueName: \"kubernetes.io/projected/dd2c676b-5ffe-4117-bd68-481ab14bcfb6-kube-api-access-tw2m5\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:30 crc kubenswrapper[4651]: I1011 05:09:30.375250 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vsh6m\" (UniqueName: \"kubernetes.io/projected/7edf14d6-b944-4477-8803-9536566e63ba-kube-api-access-vsh6m\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:30 crc kubenswrapper[4651]: I1011 05:09:30.670690 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-6859-account-create-xzc42" event={"ID":"dd2c676b-5ffe-4117-bd68-481ab14bcfb6","Type":"ContainerDied","Data":"20dfb2a35896ebf9b7c50c2242b6d1aeb6722d3a1a77d9c5f8f703d054c8ff68"} Oct 11 05:09:30 crc kubenswrapper[4651]: I1011 05:09:30.671938 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="20dfb2a35896ebf9b7c50c2242b6d1aeb6722d3a1a77d9c5f8f703d054c8ff68" Oct 11 05:09:30 crc kubenswrapper[4651]: I1011 05:09:30.670747 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-6859-account-create-xzc42" Oct 11 05:09:30 crc kubenswrapper[4651]: I1011 05:09:30.674055 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4eef40bb-34c1-42bd-acef-5a58f3737fa1","Type":"ContainerStarted","Data":"fbbed0d189b365de6dc886b523afc5e658a4b3e09c5924764e4888227e080ee0"} Oct 11 05:09:30 crc kubenswrapper[4651]: I1011 05:09:30.674204 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 11 05:09:30 crc kubenswrapper[4651]: I1011 05:09:30.675596 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-b783-account-create-z7jwg" Oct 11 05:09:30 crc kubenswrapper[4651]: I1011 05:09:30.675564 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-b783-account-create-z7jwg" event={"ID":"7edf14d6-b944-4477-8803-9536566e63ba","Type":"ContainerDied","Data":"4a928e037f4b774c9c1d116a952d30d6faf50b269c1f1cf3d2307e457cb4c17d"} Oct 11 05:09:30 crc kubenswrapper[4651]: I1011 05:09:30.675850 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4a928e037f4b774c9c1d116a952d30d6faf50b269c1f1cf3d2307e457cb4c17d" Oct 11 05:09:30 crc kubenswrapper[4651]: I1011 05:09:30.675961 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 11 05:09:30 crc kubenswrapper[4651]: I1011 05:09:30.676093 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 11 05:09:30 crc kubenswrapper[4651]: I1011 05:09:30.707347 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.531961787 podStartE2EDuration="5.707311458s" podCreationTimestamp="2025-10-11 05:09:25 +0000 UTC" firstStartedPulling="2025-10-11 05:09:26.468327922 +0000 UTC m=+1087.364560738" lastFinishedPulling="2025-10-11 05:09:29.643677573 +0000 UTC m=+1090.539910409" observedRunningTime="2025-10-11 05:09:30.698295369 +0000 UTC m=+1091.594528175" watchObservedRunningTime="2025-10-11 05:09:30.707311458 +0000 UTC m=+1091.603544254" Oct 11 05:09:31 crc kubenswrapper[4651]: I1011 05:09:31.010275 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-5fc8-account-create-jm72j" Oct 11 05:09:31 crc kubenswrapper[4651]: I1011 05:09:31.086462 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmvm9\" (UniqueName: \"kubernetes.io/projected/949f137e-f9eb-43e5-b1bc-cea47b356ee6-kube-api-access-dmvm9\") pod \"949f137e-f9eb-43e5-b1bc-cea47b356ee6\" (UID: \"949f137e-f9eb-43e5-b1bc-cea47b356ee6\") " Oct 11 05:09:31 crc kubenswrapper[4651]: I1011 05:09:31.091412 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/949f137e-f9eb-43e5-b1bc-cea47b356ee6-kube-api-access-dmvm9" (OuterVolumeSpecName: "kube-api-access-dmvm9") pod "949f137e-f9eb-43e5-b1bc-cea47b356ee6" (UID: "949f137e-f9eb-43e5-b1bc-cea47b356ee6"). InnerVolumeSpecName "kube-api-access-dmvm9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:09:31 crc kubenswrapper[4651]: I1011 05:09:31.189176 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmvm9\" (UniqueName: \"kubernetes.io/projected/949f137e-f9eb-43e5-b1bc-cea47b356ee6-kube-api-access-dmvm9\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:31 crc kubenswrapper[4651]: I1011 05:09:31.690857 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-5fc8-account-create-jm72j" event={"ID":"949f137e-f9eb-43e5-b1bc-cea47b356ee6","Type":"ContainerDied","Data":"fbfd30063fab0eb6a4933ffc3e9e3d81e0c5b28734ce8e6a558a17f901ef8462"} Oct 11 05:09:31 crc kubenswrapper[4651]: I1011 05:09:31.690921 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fbfd30063fab0eb6a4933ffc3e9e3d81e0c5b28734ce8e6a558a17f901ef8462" Oct 11 05:09:31 crc kubenswrapper[4651]: I1011 05:09:31.690933 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-5fc8-account-create-jm72j" Oct 11 05:09:32 crc kubenswrapper[4651]: I1011 05:09:32.061347 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 11 05:09:32 crc kubenswrapper[4651]: I1011 05:09:32.065292 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 11 05:09:32 crc kubenswrapper[4651]: I1011 05:09:32.098532 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 11 05:09:32 crc kubenswrapper[4651]: I1011 05:09:32.115027 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 11 05:09:32 crc kubenswrapper[4651]: I1011 05:09:32.597314 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 11 05:09:32 crc kubenswrapper[4651]: I1011 05:09:32.597377 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 11 05:09:32 crc kubenswrapper[4651]: I1011 05:09:32.699538 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 11 05:09:32 crc kubenswrapper[4651]: I1011 05:09:32.699574 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 11 05:09:32 crc kubenswrapper[4651]: I1011 05:09:32.881615 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-464ng"] Oct 11 05:09:32 crc kubenswrapper[4651]: E1011 05:09:32.881969 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7edf14d6-b944-4477-8803-9536566e63ba" containerName="mariadb-account-create" Oct 11 05:09:32 crc kubenswrapper[4651]: I1011 05:09:32.881986 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="7edf14d6-b944-4477-8803-9536566e63ba" containerName="mariadb-account-create" Oct 11 05:09:32 crc kubenswrapper[4651]: E1011 05:09:32.882005 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="949f137e-f9eb-43e5-b1bc-cea47b356ee6" containerName="mariadb-account-create" Oct 11 05:09:32 crc kubenswrapper[4651]: I1011 05:09:32.882011 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="949f137e-f9eb-43e5-b1bc-cea47b356ee6" containerName="mariadb-account-create" Oct 11 05:09:32 crc 
kubenswrapper[4651]: E1011 05:09:32.882019 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd2c676b-5ffe-4117-bd68-481ab14bcfb6" containerName="mariadb-account-create" Oct 11 05:09:32 crc kubenswrapper[4651]: I1011 05:09:32.882027 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd2c676b-5ffe-4117-bd68-481ab14bcfb6" containerName="mariadb-account-create" Oct 11 05:09:32 crc kubenswrapper[4651]: I1011 05:09:32.882199 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd2c676b-5ffe-4117-bd68-481ab14bcfb6" containerName="mariadb-account-create" Oct 11 05:09:32 crc kubenswrapper[4651]: I1011 05:09:32.882214 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="949f137e-f9eb-43e5-b1bc-cea47b356ee6" containerName="mariadb-account-create" Oct 11 05:09:32 crc kubenswrapper[4651]: I1011 05:09:32.882231 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="7edf14d6-b944-4477-8803-9536566e63ba" containerName="mariadb-account-create" Oct 11 05:09:32 crc kubenswrapper[4651]: I1011 05:09:32.882766 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-464ng" Oct 11 05:09:32 crc kubenswrapper[4651]: I1011 05:09:32.885140 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Oct 11 05:09:32 crc kubenswrapper[4651]: I1011 05:09:32.885747 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 11 05:09:32 crc kubenswrapper[4651]: I1011 05:09:32.888912 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-tcrp8" Oct 11 05:09:32 crc kubenswrapper[4651]: I1011 05:09:32.894058 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-464ng"] Oct 11 05:09:32 crc kubenswrapper[4651]: I1011 05:09:32.945258 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/899b7e7f-0b2f-48f9-8b7e-c86614b06f41-config-data\") pod \"nova-cell0-conductor-db-sync-464ng\" (UID: \"899b7e7f-0b2f-48f9-8b7e-c86614b06f41\") " pod="openstack/nova-cell0-conductor-db-sync-464ng" Oct 11 05:09:32 crc kubenswrapper[4651]: I1011 05:09:32.945348 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jr2jr\" (UniqueName: \"kubernetes.io/projected/899b7e7f-0b2f-48f9-8b7e-c86614b06f41-kube-api-access-jr2jr\") pod \"nova-cell0-conductor-db-sync-464ng\" (UID: \"899b7e7f-0b2f-48f9-8b7e-c86614b06f41\") " pod="openstack/nova-cell0-conductor-db-sync-464ng" Oct 11 05:09:32 crc kubenswrapper[4651]: I1011 05:09:32.945385 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/899b7e7f-0b2f-48f9-8b7e-c86614b06f41-scripts\") pod \"nova-cell0-conductor-db-sync-464ng\" (UID: \"899b7e7f-0b2f-48f9-8b7e-c86614b06f41\") " pod="openstack/nova-cell0-conductor-db-sync-464ng" Oct 11 05:09:32 crc kubenswrapper[4651]: I1011 05:09:32.945528 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/899b7e7f-0b2f-48f9-8b7e-c86614b06f41-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-464ng\" (UID: \"899b7e7f-0b2f-48f9-8b7e-c86614b06f41\") " 
pod="openstack/nova-cell0-conductor-db-sync-464ng" Oct 11 05:09:33 crc kubenswrapper[4651]: I1011 05:09:33.047422 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/899b7e7f-0b2f-48f9-8b7e-c86614b06f41-config-data\") pod \"nova-cell0-conductor-db-sync-464ng\" (UID: \"899b7e7f-0b2f-48f9-8b7e-c86614b06f41\") " pod="openstack/nova-cell0-conductor-db-sync-464ng" Oct 11 05:09:33 crc kubenswrapper[4651]: I1011 05:09:33.047484 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jr2jr\" (UniqueName: \"kubernetes.io/projected/899b7e7f-0b2f-48f9-8b7e-c86614b06f41-kube-api-access-jr2jr\") pod \"nova-cell0-conductor-db-sync-464ng\" (UID: \"899b7e7f-0b2f-48f9-8b7e-c86614b06f41\") " pod="openstack/nova-cell0-conductor-db-sync-464ng" Oct 11 05:09:33 crc kubenswrapper[4651]: I1011 05:09:33.047520 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/899b7e7f-0b2f-48f9-8b7e-c86614b06f41-scripts\") pod \"nova-cell0-conductor-db-sync-464ng\" (UID: \"899b7e7f-0b2f-48f9-8b7e-c86614b06f41\") " pod="openstack/nova-cell0-conductor-db-sync-464ng" Oct 11 05:09:33 crc kubenswrapper[4651]: I1011 05:09:33.047608 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/899b7e7f-0b2f-48f9-8b7e-c86614b06f41-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-464ng\" (UID: \"899b7e7f-0b2f-48f9-8b7e-c86614b06f41\") " pod="openstack/nova-cell0-conductor-db-sync-464ng" Oct 11 05:09:33 crc kubenswrapper[4651]: I1011 05:09:33.056426 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/899b7e7f-0b2f-48f9-8b7e-c86614b06f41-config-data\") pod \"nova-cell0-conductor-db-sync-464ng\" (UID: \"899b7e7f-0b2f-48f9-8b7e-c86614b06f41\") " pod="openstack/nova-cell0-conductor-db-sync-464ng" Oct 11 05:09:33 crc kubenswrapper[4651]: I1011 05:09:33.062319 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/899b7e7f-0b2f-48f9-8b7e-c86614b06f41-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-464ng\" (UID: \"899b7e7f-0b2f-48f9-8b7e-c86614b06f41\") " pod="openstack/nova-cell0-conductor-db-sync-464ng" Oct 11 05:09:33 crc kubenswrapper[4651]: I1011 05:09:33.066243 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jr2jr\" (UniqueName: \"kubernetes.io/projected/899b7e7f-0b2f-48f9-8b7e-c86614b06f41-kube-api-access-jr2jr\") pod \"nova-cell0-conductor-db-sync-464ng\" (UID: \"899b7e7f-0b2f-48f9-8b7e-c86614b06f41\") " pod="openstack/nova-cell0-conductor-db-sync-464ng" Oct 11 05:09:33 crc kubenswrapper[4651]: I1011 05:09:33.066285 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/899b7e7f-0b2f-48f9-8b7e-c86614b06f41-scripts\") pod \"nova-cell0-conductor-db-sync-464ng\" (UID: \"899b7e7f-0b2f-48f9-8b7e-c86614b06f41\") " pod="openstack/nova-cell0-conductor-db-sync-464ng" Oct 11 05:09:33 crc kubenswrapper[4651]: I1011 05:09:33.198448 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-464ng" Oct 11 05:09:33 crc kubenswrapper[4651]: I1011 05:09:33.705986 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-464ng"] Oct 11 05:09:34 crc kubenswrapper[4651]: I1011 05:09:34.707028 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 11 05:09:34 crc kubenswrapper[4651]: I1011 05:09:34.716500 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-464ng" event={"ID":"899b7e7f-0b2f-48f9-8b7e-c86614b06f41","Type":"ContainerStarted","Data":"52085ce43c98d8fac55e78150a6ef2736ed2a296b486825b7b615c9a15bcae9d"} Oct 11 05:09:34 crc kubenswrapper[4651]: I1011 05:09:34.716552 4651 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 11 05:09:34 crc kubenswrapper[4651]: I1011 05:09:34.838410 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 11 05:09:35 crc kubenswrapper[4651]: I1011 05:09:35.815647 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 11 05:09:35 crc kubenswrapper[4651]: I1011 05:09:35.815964 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4eef40bb-34c1-42bd-acef-5a58f3737fa1" containerName="ceilometer-central-agent" containerID="cri-o://dcc11aa8f58e46b93834965562036ce361ca05d52c4144d1b7a8c0b091723b2a" gracePeriod=30 Oct 11 05:09:35 crc kubenswrapper[4651]: I1011 05:09:35.816084 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4eef40bb-34c1-42bd-acef-5a58f3737fa1" containerName="ceilometer-notification-agent" containerID="cri-o://4e6fd4ac980d59f619b0dc177c204d462d629eb3f0c8268921b4353231bb9ec1" gracePeriod=30 Oct 11 05:09:35 crc kubenswrapper[4651]: I1011 05:09:35.816082 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4eef40bb-34c1-42bd-acef-5a58f3737fa1" containerName="proxy-httpd" containerID="cri-o://fbbed0d189b365de6dc886b523afc5e658a4b3e09c5924764e4888227e080ee0" gracePeriod=30 Oct 11 05:09:35 crc kubenswrapper[4651]: I1011 05:09:35.816579 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4eef40bb-34c1-42bd-acef-5a58f3737fa1" containerName="sg-core" containerID="cri-o://1d54f4fded1b6a9a87f4fffd04eae084cda01bfbe6537be6617a4cc9d36534f2" gracePeriod=30 Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.698667 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.746337 4651 generic.go:334] "Generic (PLEG): container finished" podID="4eef40bb-34c1-42bd-acef-5a58f3737fa1" containerID="fbbed0d189b365de6dc886b523afc5e658a4b3e09c5924764e4888227e080ee0" exitCode=0 Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.746376 4651 generic.go:334] "Generic (PLEG): container finished" podID="4eef40bb-34c1-42bd-acef-5a58f3737fa1" containerID="1d54f4fded1b6a9a87f4fffd04eae084cda01bfbe6537be6617a4cc9d36534f2" exitCode=2 Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.746388 4651 generic.go:334] "Generic (PLEG): container finished" podID="4eef40bb-34c1-42bd-acef-5a58f3737fa1" containerID="4e6fd4ac980d59f619b0dc177c204d462d629eb3f0c8268921b4353231bb9ec1" exitCode=0 Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.746398 4651 generic.go:334] "Generic (PLEG): container finished" podID="4eef40bb-34c1-42bd-acef-5a58f3737fa1" containerID="dcc11aa8f58e46b93834965562036ce361ca05d52c4144d1b7a8c0b091723b2a" exitCode=0 Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.746419 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4eef40bb-34c1-42bd-acef-5a58f3737fa1","Type":"ContainerDied","Data":"fbbed0d189b365de6dc886b523afc5e658a4b3e09c5924764e4888227e080ee0"} Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.746455 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4eef40bb-34c1-42bd-acef-5a58f3737fa1","Type":"ContainerDied","Data":"1d54f4fded1b6a9a87f4fffd04eae084cda01bfbe6537be6617a4cc9d36534f2"} Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.746465 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4eef40bb-34c1-42bd-acef-5a58f3737fa1","Type":"ContainerDied","Data":"4e6fd4ac980d59f619b0dc177c204d462d629eb3f0c8268921b4353231bb9ec1"} Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.746474 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4eef40bb-34c1-42bd-acef-5a58f3737fa1","Type":"ContainerDied","Data":"dcc11aa8f58e46b93834965562036ce361ca05d52c4144d1b7a8c0b091723b2a"} Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.746483 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4eef40bb-34c1-42bd-acef-5a58f3737fa1","Type":"ContainerDied","Data":"622ce5d5f78351b7b6d09150e5342e1603473a0700d493fddfceeadc311304e3"} Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.746498 4651 scope.go:117] "RemoveContainer" containerID="fbbed0d189b365de6dc886b523afc5e658a4b3e09c5924764e4888227e080ee0" Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.746396 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.834155 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zwcq5\" (UniqueName: \"kubernetes.io/projected/4eef40bb-34c1-42bd-acef-5a58f3737fa1-kube-api-access-zwcq5\") pod \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\" (UID: \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\") " Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.835164 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4eef40bb-34c1-42bd-acef-5a58f3737fa1-ceilometer-tls-certs\") pod \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\" (UID: \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\") " Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.835365 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4eef40bb-34c1-42bd-acef-5a58f3737fa1-scripts\") pod \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\" (UID: \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\") " Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.835462 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4eef40bb-34c1-42bd-acef-5a58f3737fa1-log-httpd\") pod \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\" (UID: \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\") " Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.835503 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4eef40bb-34c1-42bd-acef-5a58f3737fa1-config-data\") pod \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\" (UID: \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\") " Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.835542 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4eef40bb-34c1-42bd-acef-5a58f3737fa1-run-httpd\") pod \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\" (UID: \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\") " Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.835650 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4eef40bb-34c1-42bd-acef-5a58f3737fa1-sg-core-conf-yaml\") pod \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\" (UID: \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\") " Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.835682 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4eef40bb-34c1-42bd-acef-5a58f3737fa1-combined-ca-bundle\") pod \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\" (UID: \"4eef40bb-34c1-42bd-acef-5a58f3737fa1\") " Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.836366 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4eef40bb-34c1-42bd-acef-5a58f3737fa1-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "4eef40bb-34c1-42bd-acef-5a58f3737fa1" (UID: "4eef40bb-34c1-42bd-acef-5a58f3737fa1"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.837320 4651 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4eef40bb-34c1-42bd-acef-5a58f3737fa1-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.837397 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4eef40bb-34c1-42bd-acef-5a58f3737fa1-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "4eef40bb-34c1-42bd-acef-5a58f3737fa1" (UID: "4eef40bb-34c1-42bd-acef-5a58f3737fa1"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.846771 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4eef40bb-34c1-42bd-acef-5a58f3737fa1-scripts" (OuterVolumeSpecName: "scripts") pod "4eef40bb-34c1-42bd-acef-5a58f3737fa1" (UID: "4eef40bb-34c1-42bd-acef-5a58f3737fa1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.847918 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4eef40bb-34c1-42bd-acef-5a58f3737fa1-kube-api-access-zwcq5" (OuterVolumeSpecName: "kube-api-access-zwcq5") pod "4eef40bb-34c1-42bd-acef-5a58f3737fa1" (UID: "4eef40bb-34c1-42bd-acef-5a58f3737fa1"). InnerVolumeSpecName "kube-api-access-zwcq5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.867006 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4eef40bb-34c1-42bd-acef-5a58f3737fa1-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "4eef40bb-34c1-42bd-acef-5a58f3737fa1" (UID: "4eef40bb-34c1-42bd-acef-5a58f3737fa1"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.899506 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4eef40bb-34c1-42bd-acef-5a58f3737fa1-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "4eef40bb-34c1-42bd-acef-5a58f3737fa1" (UID: "4eef40bb-34c1-42bd-acef-5a58f3737fa1"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.903727 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4eef40bb-34c1-42bd-acef-5a58f3737fa1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4eef40bb-34c1-42bd-acef-5a58f3737fa1" (UID: "4eef40bb-34c1-42bd-acef-5a58f3737fa1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.926793 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4eef40bb-34c1-42bd-acef-5a58f3737fa1-config-data" (OuterVolumeSpecName: "config-data") pod "4eef40bb-34c1-42bd-acef-5a58f3737fa1" (UID: "4eef40bb-34c1-42bd-acef-5a58f3737fa1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.939132 4651 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4eef40bb-34c1-42bd-acef-5a58f3737fa1-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.939164 4651 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4eef40bb-34c1-42bd-acef-5a58f3737fa1-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.939173 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4eef40bb-34c1-42bd-acef-5a58f3737fa1-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.939181 4651 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4eef40bb-34c1-42bd-acef-5a58f3737fa1-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.939190 4651 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4eef40bb-34c1-42bd-acef-5a58f3737fa1-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.939198 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4eef40bb-34c1-42bd-acef-5a58f3737fa1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:36 crc kubenswrapper[4651]: I1011 05:09:36.939206 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zwcq5\" (UniqueName: \"kubernetes.io/projected/4eef40bb-34c1-42bd-acef-5a58f3737fa1-kube-api-access-zwcq5\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.080474 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.093452 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.104988 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 11 05:09:37 crc kubenswrapper[4651]: E1011 05:09:37.105538 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4eef40bb-34c1-42bd-acef-5a58f3737fa1" containerName="sg-core" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.105565 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="4eef40bb-34c1-42bd-acef-5a58f3737fa1" containerName="sg-core" Oct 11 05:09:37 crc kubenswrapper[4651]: E1011 05:09:37.105586 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4eef40bb-34c1-42bd-acef-5a58f3737fa1" containerName="ceilometer-notification-agent" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.105594 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="4eef40bb-34c1-42bd-acef-5a58f3737fa1" containerName="ceilometer-notification-agent" Oct 11 05:09:37 crc kubenswrapper[4651]: E1011 05:09:37.105613 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4eef40bb-34c1-42bd-acef-5a58f3737fa1" containerName="ceilometer-central-agent" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.105621 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="4eef40bb-34c1-42bd-acef-5a58f3737fa1" 
containerName="ceilometer-central-agent" Oct 11 05:09:37 crc kubenswrapper[4651]: E1011 05:09:37.105644 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4eef40bb-34c1-42bd-acef-5a58f3737fa1" containerName="proxy-httpd" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.105652 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="4eef40bb-34c1-42bd-acef-5a58f3737fa1" containerName="proxy-httpd" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.105871 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="4eef40bb-34c1-42bd-acef-5a58f3737fa1" containerName="ceilometer-central-agent" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.105896 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="4eef40bb-34c1-42bd-acef-5a58f3737fa1" containerName="proxy-httpd" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.105909 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="4eef40bb-34c1-42bd-acef-5a58f3737fa1" containerName="ceilometer-notification-agent" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.105934 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="4eef40bb-34c1-42bd-acef-5a58f3737fa1" containerName="sg-core" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.107773 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.110592 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.110739 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.110754 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.114972 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.243211 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/074c84b3-f777-4c05-89dc-6e55dd72a2b9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\") " pod="openstack/ceilometer-0" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.243306 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/074c84b3-f777-4c05-89dc-6e55dd72a2b9-scripts\") pod \"ceilometer-0\" (UID: \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\") " pod="openstack/ceilometer-0" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.243326 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/074c84b3-f777-4c05-89dc-6e55dd72a2b9-log-httpd\") pod \"ceilometer-0\" (UID: \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\") " pod="openstack/ceilometer-0" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.243401 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/074c84b3-f777-4c05-89dc-6e55dd72a2b9-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\") " 
pod="openstack/ceilometer-0" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.243491 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rc28\" (UniqueName: \"kubernetes.io/projected/074c84b3-f777-4c05-89dc-6e55dd72a2b9-kube-api-access-9rc28\") pod \"ceilometer-0\" (UID: \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\") " pod="openstack/ceilometer-0" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.243587 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/074c84b3-f777-4c05-89dc-6e55dd72a2b9-config-data\") pod \"ceilometer-0\" (UID: \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\") " pod="openstack/ceilometer-0" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.243652 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/074c84b3-f777-4c05-89dc-6e55dd72a2b9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\") " pod="openstack/ceilometer-0" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.243768 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/074c84b3-f777-4c05-89dc-6e55dd72a2b9-run-httpd\") pod \"ceilometer-0\" (UID: \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\") " pod="openstack/ceilometer-0" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.345215 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/074c84b3-f777-4c05-89dc-6e55dd72a2b9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\") " pod="openstack/ceilometer-0" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.345310 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/074c84b3-f777-4c05-89dc-6e55dd72a2b9-scripts\") pod \"ceilometer-0\" (UID: \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\") " pod="openstack/ceilometer-0" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.345333 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/074c84b3-f777-4c05-89dc-6e55dd72a2b9-log-httpd\") pod \"ceilometer-0\" (UID: \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\") " pod="openstack/ceilometer-0" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.345352 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/074c84b3-f777-4c05-89dc-6e55dd72a2b9-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\") " pod="openstack/ceilometer-0" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.345386 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rc28\" (UniqueName: \"kubernetes.io/projected/074c84b3-f777-4c05-89dc-6e55dd72a2b9-kube-api-access-9rc28\") pod \"ceilometer-0\" (UID: \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\") " pod="openstack/ceilometer-0" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.345417 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/074c84b3-f777-4c05-89dc-6e55dd72a2b9-config-data\") pod \"ceilometer-0\" (UID: \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\") " pod="openstack/ceilometer-0" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.345444 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/074c84b3-f777-4c05-89dc-6e55dd72a2b9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\") " pod="openstack/ceilometer-0" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.345477 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/074c84b3-f777-4c05-89dc-6e55dd72a2b9-run-httpd\") pod \"ceilometer-0\" (UID: \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\") " pod="openstack/ceilometer-0" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.345791 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/074c84b3-f777-4c05-89dc-6e55dd72a2b9-log-httpd\") pod \"ceilometer-0\" (UID: \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\") " pod="openstack/ceilometer-0" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.345839 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/074c84b3-f777-4c05-89dc-6e55dd72a2b9-run-httpd\") pod \"ceilometer-0\" (UID: \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\") " pod="openstack/ceilometer-0" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.349192 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/074c84b3-f777-4c05-89dc-6e55dd72a2b9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\") " pod="openstack/ceilometer-0" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.352862 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/074c84b3-f777-4c05-89dc-6e55dd72a2b9-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\") " pod="openstack/ceilometer-0" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.353006 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/074c84b3-f777-4c05-89dc-6e55dd72a2b9-scripts\") pod \"ceilometer-0\" (UID: \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\") " pod="openstack/ceilometer-0" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.353529 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/074c84b3-f777-4c05-89dc-6e55dd72a2b9-config-data\") pod \"ceilometer-0\" (UID: \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\") " pod="openstack/ceilometer-0" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.357583 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/074c84b3-f777-4c05-89dc-6e55dd72a2b9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\") " pod="openstack/ceilometer-0" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.364418 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rc28\" (UniqueName: 
\"kubernetes.io/projected/074c84b3-f777-4c05-89dc-6e55dd72a2b9-kube-api-access-9rc28\") pod \"ceilometer-0\" (UID: \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\") " pod="openstack/ceilometer-0" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.435033 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 11 05:09:37 crc kubenswrapper[4651]: I1011 05:09:37.879558 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4eef40bb-34c1-42bd-acef-5a58f3737fa1" path="/var/lib/kubelet/pods/4eef40bb-34c1-42bd-acef-5a58f3737fa1/volumes" Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.297854 4651 scope.go:117] "RemoveContainer" containerID="1d54f4fded1b6a9a87f4fffd04eae084cda01bfbe6537be6617a4cc9d36534f2" Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.343054 4651 scope.go:117] "RemoveContainer" containerID="4e6fd4ac980d59f619b0dc177c204d462d629eb3f0c8268921b4353231bb9ec1" Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.511554 4651 scope.go:117] "RemoveContainer" containerID="dcc11aa8f58e46b93834965562036ce361ca05d52c4144d1b7a8c0b091723b2a" Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.542743 4651 scope.go:117] "RemoveContainer" containerID="fbbed0d189b365de6dc886b523afc5e658a4b3e09c5924764e4888227e080ee0" Oct 11 05:09:41 crc kubenswrapper[4651]: E1011 05:09:41.544030 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fbbed0d189b365de6dc886b523afc5e658a4b3e09c5924764e4888227e080ee0\": container with ID starting with fbbed0d189b365de6dc886b523afc5e658a4b3e09c5924764e4888227e080ee0 not found: ID does not exist" containerID="fbbed0d189b365de6dc886b523afc5e658a4b3e09c5924764e4888227e080ee0" Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.544143 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbbed0d189b365de6dc886b523afc5e658a4b3e09c5924764e4888227e080ee0"} err="failed to get container status \"fbbed0d189b365de6dc886b523afc5e658a4b3e09c5924764e4888227e080ee0\": rpc error: code = NotFound desc = could not find container \"fbbed0d189b365de6dc886b523afc5e658a4b3e09c5924764e4888227e080ee0\": container with ID starting with fbbed0d189b365de6dc886b523afc5e658a4b3e09c5924764e4888227e080ee0 not found: ID does not exist" Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.544189 4651 scope.go:117] "RemoveContainer" containerID="1d54f4fded1b6a9a87f4fffd04eae084cda01bfbe6537be6617a4cc9d36534f2" Oct 11 05:09:41 crc kubenswrapper[4651]: E1011 05:09:41.545262 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d54f4fded1b6a9a87f4fffd04eae084cda01bfbe6537be6617a4cc9d36534f2\": container with ID starting with 1d54f4fded1b6a9a87f4fffd04eae084cda01bfbe6537be6617a4cc9d36534f2 not found: ID does not exist" containerID="1d54f4fded1b6a9a87f4fffd04eae084cda01bfbe6537be6617a4cc9d36534f2" Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.545324 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d54f4fded1b6a9a87f4fffd04eae084cda01bfbe6537be6617a4cc9d36534f2"} err="failed to get container status \"1d54f4fded1b6a9a87f4fffd04eae084cda01bfbe6537be6617a4cc9d36534f2\": rpc error: code = NotFound desc = could not find container \"1d54f4fded1b6a9a87f4fffd04eae084cda01bfbe6537be6617a4cc9d36534f2\": container with ID starting with 
1d54f4fded1b6a9a87f4fffd04eae084cda01bfbe6537be6617a4cc9d36534f2 not found: ID does not exist" Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.545365 4651 scope.go:117] "RemoveContainer" containerID="4e6fd4ac980d59f619b0dc177c204d462d629eb3f0c8268921b4353231bb9ec1" Oct 11 05:09:41 crc kubenswrapper[4651]: E1011 05:09:41.545726 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e6fd4ac980d59f619b0dc177c204d462d629eb3f0c8268921b4353231bb9ec1\": container with ID starting with 4e6fd4ac980d59f619b0dc177c204d462d629eb3f0c8268921b4353231bb9ec1 not found: ID does not exist" containerID="4e6fd4ac980d59f619b0dc177c204d462d629eb3f0c8268921b4353231bb9ec1" Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.545760 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e6fd4ac980d59f619b0dc177c204d462d629eb3f0c8268921b4353231bb9ec1"} err="failed to get container status \"4e6fd4ac980d59f619b0dc177c204d462d629eb3f0c8268921b4353231bb9ec1\": rpc error: code = NotFound desc = could not find container \"4e6fd4ac980d59f619b0dc177c204d462d629eb3f0c8268921b4353231bb9ec1\": container with ID starting with 4e6fd4ac980d59f619b0dc177c204d462d629eb3f0c8268921b4353231bb9ec1 not found: ID does not exist" Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.545780 4651 scope.go:117] "RemoveContainer" containerID="dcc11aa8f58e46b93834965562036ce361ca05d52c4144d1b7a8c0b091723b2a" Oct 11 05:09:41 crc kubenswrapper[4651]: E1011 05:09:41.546099 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dcc11aa8f58e46b93834965562036ce361ca05d52c4144d1b7a8c0b091723b2a\": container with ID starting with dcc11aa8f58e46b93834965562036ce361ca05d52c4144d1b7a8c0b091723b2a not found: ID does not exist" containerID="dcc11aa8f58e46b93834965562036ce361ca05d52c4144d1b7a8c0b091723b2a" Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.546129 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dcc11aa8f58e46b93834965562036ce361ca05d52c4144d1b7a8c0b091723b2a"} err="failed to get container status \"dcc11aa8f58e46b93834965562036ce361ca05d52c4144d1b7a8c0b091723b2a\": rpc error: code = NotFound desc = could not find container \"dcc11aa8f58e46b93834965562036ce361ca05d52c4144d1b7a8c0b091723b2a\": container with ID starting with dcc11aa8f58e46b93834965562036ce361ca05d52c4144d1b7a8c0b091723b2a not found: ID does not exist" Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.546146 4651 scope.go:117] "RemoveContainer" containerID="fbbed0d189b365de6dc886b523afc5e658a4b3e09c5924764e4888227e080ee0" Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.546496 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbbed0d189b365de6dc886b523afc5e658a4b3e09c5924764e4888227e080ee0"} err="failed to get container status \"fbbed0d189b365de6dc886b523afc5e658a4b3e09c5924764e4888227e080ee0\": rpc error: code = NotFound desc = could not find container \"fbbed0d189b365de6dc886b523afc5e658a4b3e09c5924764e4888227e080ee0\": container with ID starting with fbbed0d189b365de6dc886b523afc5e658a4b3e09c5924764e4888227e080ee0 not found: ID does not exist" Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.546523 4651 scope.go:117] "RemoveContainer" containerID="1d54f4fded1b6a9a87f4fffd04eae084cda01bfbe6537be6617a4cc9d36534f2" Oct 11 05:09:41 crc 
Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.546775 4651 scope.go:117] "RemoveContainer" containerID="4e6fd4ac980d59f619b0dc177c204d462d629eb3f0c8268921b4353231bb9ec1"
Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.547670 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e6fd4ac980d59f619b0dc177c204d462d629eb3f0c8268921b4353231bb9ec1"} err="failed to get container status \"4e6fd4ac980d59f619b0dc177c204d462d629eb3f0c8268921b4353231bb9ec1\": rpc error: code = NotFound desc = could not find container \"4e6fd4ac980d59f619b0dc177c204d462d629eb3f0c8268921b4353231bb9ec1\": container with ID starting with 4e6fd4ac980d59f619b0dc177c204d462d629eb3f0c8268921b4353231bb9ec1 not found: ID does not exist"
Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.547714 4651 scope.go:117] "RemoveContainer" containerID="dcc11aa8f58e46b93834965562036ce361ca05d52c4144d1b7a8c0b091723b2a"
Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.548066 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dcc11aa8f58e46b93834965562036ce361ca05d52c4144d1b7a8c0b091723b2a"} err="failed to get container status \"dcc11aa8f58e46b93834965562036ce361ca05d52c4144d1b7a8c0b091723b2a\": rpc error: code = NotFound desc = could not find container \"dcc11aa8f58e46b93834965562036ce361ca05d52c4144d1b7a8c0b091723b2a\": container with ID starting with dcc11aa8f58e46b93834965562036ce361ca05d52c4144d1b7a8c0b091723b2a not found: ID does not exist"
Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.548092 4651 scope.go:117] "RemoveContainer" containerID="fbbed0d189b365de6dc886b523afc5e658a4b3e09c5924764e4888227e080ee0"
Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.548403 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbbed0d189b365de6dc886b523afc5e658a4b3e09c5924764e4888227e080ee0"} err="failed to get container status \"fbbed0d189b365de6dc886b523afc5e658a4b3e09c5924764e4888227e080ee0\": rpc error: code = NotFound desc = could not find container \"fbbed0d189b365de6dc886b523afc5e658a4b3e09c5924764e4888227e080ee0\": container with ID starting with fbbed0d189b365de6dc886b523afc5e658a4b3e09c5924764e4888227e080ee0 not found: ID does not exist"
Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.548431 4651 scope.go:117] "RemoveContainer" containerID="1d54f4fded1b6a9a87f4fffd04eae084cda01bfbe6537be6617a4cc9d36534f2"
Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.548660 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d54f4fded1b6a9a87f4fffd04eae084cda01bfbe6537be6617a4cc9d36534f2"} err="failed to get container status \"1d54f4fded1b6a9a87f4fffd04eae084cda01bfbe6537be6617a4cc9d36534f2\": rpc error: code = NotFound desc = could not find container \"1d54f4fded1b6a9a87f4fffd04eae084cda01bfbe6537be6617a4cc9d36534f2\": container with ID starting with 1d54f4fded1b6a9a87f4fffd04eae084cda01bfbe6537be6617a4cc9d36534f2 not found: ID does not exist"
Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.548697 4651 scope.go:117] "RemoveContainer" containerID="4e6fd4ac980d59f619b0dc177c204d462d629eb3f0c8268921b4353231bb9ec1"
Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.548948 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e6fd4ac980d59f619b0dc177c204d462d629eb3f0c8268921b4353231bb9ec1"} err="failed to get container status \"4e6fd4ac980d59f619b0dc177c204d462d629eb3f0c8268921b4353231bb9ec1\": rpc error: code = NotFound desc = could not find container \"4e6fd4ac980d59f619b0dc177c204d462d629eb3f0c8268921b4353231bb9ec1\": container with ID starting with 4e6fd4ac980d59f619b0dc177c204d462d629eb3f0c8268921b4353231bb9ec1 not found: ID does not exist"
Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.548974 4651 scope.go:117] "RemoveContainer" containerID="dcc11aa8f58e46b93834965562036ce361ca05d52c4144d1b7a8c0b091723b2a"
Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.549862 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dcc11aa8f58e46b93834965562036ce361ca05d52c4144d1b7a8c0b091723b2a"} err="failed to get container status \"dcc11aa8f58e46b93834965562036ce361ca05d52c4144d1b7a8c0b091723b2a\": rpc error: code = NotFound desc = could not find container \"dcc11aa8f58e46b93834965562036ce361ca05d52c4144d1b7a8c0b091723b2a\": container with ID starting with dcc11aa8f58e46b93834965562036ce361ca05d52c4144d1b7a8c0b091723b2a not found: ID does not exist"
Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.549886 4651 scope.go:117] "RemoveContainer" containerID="fbbed0d189b365de6dc886b523afc5e658a4b3e09c5924764e4888227e080ee0"
Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.550242 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbbed0d189b365de6dc886b523afc5e658a4b3e09c5924764e4888227e080ee0"} err="failed to get container status \"fbbed0d189b365de6dc886b523afc5e658a4b3e09c5924764e4888227e080ee0\": rpc error: code = NotFound desc = could not find container \"fbbed0d189b365de6dc886b523afc5e658a4b3e09c5924764e4888227e080ee0\": container with ID starting with fbbed0d189b365de6dc886b523afc5e658a4b3e09c5924764e4888227e080ee0 not found: ID does not exist"
Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.550272 4651 scope.go:117] "RemoveContainer" containerID="1d54f4fded1b6a9a87f4fffd04eae084cda01bfbe6537be6617a4cc9d36534f2"
Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.550581 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d54f4fded1b6a9a87f4fffd04eae084cda01bfbe6537be6617a4cc9d36534f2"} err="failed to get container status \"1d54f4fded1b6a9a87f4fffd04eae084cda01bfbe6537be6617a4cc9d36534f2\": rpc error: code = NotFound desc = could not find container \"1d54f4fded1b6a9a87f4fffd04eae084cda01bfbe6537be6617a4cc9d36534f2\": container with ID starting with 1d54f4fded1b6a9a87f4fffd04eae084cda01bfbe6537be6617a4cc9d36534f2 not found: ID does not exist"
Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.550621 4651 scope.go:117] "RemoveContainer" containerID="4e6fd4ac980d59f619b0dc177c204d462d629eb3f0c8268921b4353231bb9ec1"
Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.551282 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e6fd4ac980d59f619b0dc177c204d462d629eb3f0c8268921b4353231bb9ec1"} err="failed to get container status \"4e6fd4ac980d59f619b0dc177c204d462d629eb3f0c8268921b4353231bb9ec1\": rpc error: code = NotFound desc = could not find container \"4e6fd4ac980d59f619b0dc177c204d462d629eb3f0c8268921b4353231bb9ec1\": container with ID starting with 4e6fd4ac980d59f619b0dc177c204d462d629eb3f0c8268921b4353231bb9ec1 not found: ID does not exist"
containerID={"Type":"cri-o","ID":"4e6fd4ac980d59f619b0dc177c204d462d629eb3f0c8268921b4353231bb9ec1"} err="failed to get container status \"4e6fd4ac980d59f619b0dc177c204d462d629eb3f0c8268921b4353231bb9ec1\": rpc error: code = NotFound desc = could not find container \"4e6fd4ac980d59f619b0dc177c204d462d629eb3f0c8268921b4353231bb9ec1\": container with ID starting with 4e6fd4ac980d59f619b0dc177c204d462d629eb3f0c8268921b4353231bb9ec1 not found: ID does not exist" Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.551312 4651 scope.go:117] "RemoveContainer" containerID="dcc11aa8f58e46b93834965562036ce361ca05d52c4144d1b7a8c0b091723b2a" Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.552049 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dcc11aa8f58e46b93834965562036ce361ca05d52c4144d1b7a8c0b091723b2a"} err="failed to get container status \"dcc11aa8f58e46b93834965562036ce361ca05d52c4144d1b7a8c0b091723b2a\": rpc error: code = NotFound desc = could not find container \"dcc11aa8f58e46b93834965562036ce361ca05d52c4144d1b7a8c0b091723b2a\": container with ID starting with dcc11aa8f58e46b93834965562036ce361ca05d52c4144d1b7a8c0b091723b2a not found: ID does not exist" Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.790469 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-464ng" event={"ID":"899b7e7f-0b2f-48f9-8b7e-c86614b06f41","Type":"ContainerStarted","Data":"d4e0a300f02f03e5a9ba3d36581d964fef9f440b4fc093ec7fbcdabc318650c9"} Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.822408 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 11 05:09:41 crc kubenswrapper[4651]: I1011 05:09:41.823750 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-464ng" podStartSLOduration=2.166373817 podStartE2EDuration="9.82374024s" podCreationTimestamp="2025-10-11 05:09:32 +0000 UTC" firstStartedPulling="2025-10-11 05:09:33.715494695 +0000 UTC m=+1094.611727491" lastFinishedPulling="2025-10-11 05:09:41.372861118 +0000 UTC m=+1102.269093914" observedRunningTime="2025-10-11 05:09:41.806681645 +0000 UTC m=+1102.702914441" watchObservedRunningTime="2025-10-11 05:09:41.82374024 +0000 UTC m=+1102.719973036" Oct 11 05:09:42 crc kubenswrapper[4651]: I1011 05:09:42.805500 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"074c84b3-f777-4c05-89dc-6e55dd72a2b9","Type":"ContainerStarted","Data":"219dd73522998cd618c6f9c9561803b95ff3a4b743770fdda950016bd0acd214"} Oct 11 05:09:42 crc kubenswrapper[4651]: I1011 05:09:42.805795 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"074c84b3-f777-4c05-89dc-6e55dd72a2b9","Type":"ContainerStarted","Data":"7abf68480556773970568c5b6907eed71971e6f6e918e03e108359e238b648c6"} Oct 11 05:09:43 crc kubenswrapper[4651]: I1011 05:09:43.818670 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"074c84b3-f777-4c05-89dc-6e55dd72a2b9","Type":"ContainerStarted","Data":"91ec52c4f6fd17b99366ac76e5cb220299dd84af0a25c0368bf36cd642f8d1bd"} Oct 11 05:09:43 crc kubenswrapper[4651]: I1011 05:09:43.819509 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"074c84b3-f777-4c05-89dc-6e55dd72a2b9","Type":"ContainerStarted","Data":"2a9b1cfa40f355f42e0681ef17c2223bc5651c0db8a36d51f5380df98223ad59"} Oct 11 05:09:45 crc 
kubenswrapper[4651]: I1011 05:09:45.850769 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"074c84b3-f777-4c05-89dc-6e55dd72a2b9","Type":"ContainerStarted","Data":"7ded35d600cb77aff8b1b70082f2d923aa6ae14eff4a18e1b7fd0795b00c2f5f"} Oct 11 05:09:45 crc kubenswrapper[4651]: I1011 05:09:45.852735 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 11 05:09:45 crc kubenswrapper[4651]: I1011 05:09:45.894922 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=5.430822596 podStartE2EDuration="8.894885095s" podCreationTimestamp="2025-10-11 05:09:37 +0000 UTC" firstStartedPulling="2025-10-11 05:09:41.816858278 +0000 UTC m=+1102.713091084" lastFinishedPulling="2025-10-11 05:09:45.280920777 +0000 UTC m=+1106.177153583" observedRunningTime="2025-10-11 05:09:45.879608474 +0000 UTC m=+1106.775841340" watchObservedRunningTime="2025-10-11 05:09:45.894885095 +0000 UTC m=+1106.791117941" Oct 11 05:09:46 crc kubenswrapper[4651]: I1011 05:09:46.310379 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 05:09:46 crc kubenswrapper[4651]: I1011 05:09:46.310459 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 05:09:52 crc kubenswrapper[4651]: I1011 05:09:52.939103 4651 generic.go:334] "Generic (PLEG): container finished" podID="899b7e7f-0b2f-48f9-8b7e-c86614b06f41" containerID="d4e0a300f02f03e5a9ba3d36581d964fef9f440b4fc093ec7fbcdabc318650c9" exitCode=0 Oct 11 05:09:52 crc kubenswrapper[4651]: I1011 05:09:52.939262 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-464ng" event={"ID":"899b7e7f-0b2f-48f9-8b7e-c86614b06f41","Type":"ContainerDied","Data":"d4e0a300f02f03e5a9ba3d36581d964fef9f440b4fc093ec7fbcdabc318650c9"} Oct 11 05:09:54 crc kubenswrapper[4651]: I1011 05:09:54.378431 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-464ng" Oct 11 05:09:54 crc kubenswrapper[4651]: I1011 05:09:54.483381 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/899b7e7f-0b2f-48f9-8b7e-c86614b06f41-combined-ca-bundle\") pod \"899b7e7f-0b2f-48f9-8b7e-c86614b06f41\" (UID: \"899b7e7f-0b2f-48f9-8b7e-c86614b06f41\") " Oct 11 05:09:54 crc kubenswrapper[4651]: I1011 05:09:54.483552 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/899b7e7f-0b2f-48f9-8b7e-c86614b06f41-config-data\") pod \"899b7e7f-0b2f-48f9-8b7e-c86614b06f41\" (UID: \"899b7e7f-0b2f-48f9-8b7e-c86614b06f41\") " Oct 11 05:09:54 crc kubenswrapper[4651]: I1011 05:09:54.483574 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jr2jr\" (UniqueName: \"kubernetes.io/projected/899b7e7f-0b2f-48f9-8b7e-c86614b06f41-kube-api-access-jr2jr\") pod \"899b7e7f-0b2f-48f9-8b7e-c86614b06f41\" (UID: \"899b7e7f-0b2f-48f9-8b7e-c86614b06f41\") " Oct 11 05:09:54 crc kubenswrapper[4651]: I1011 05:09:54.483655 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/899b7e7f-0b2f-48f9-8b7e-c86614b06f41-scripts\") pod \"899b7e7f-0b2f-48f9-8b7e-c86614b06f41\" (UID: \"899b7e7f-0b2f-48f9-8b7e-c86614b06f41\") " Oct 11 05:09:54 crc kubenswrapper[4651]: I1011 05:09:54.489151 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/899b7e7f-0b2f-48f9-8b7e-c86614b06f41-scripts" (OuterVolumeSpecName: "scripts") pod "899b7e7f-0b2f-48f9-8b7e-c86614b06f41" (UID: "899b7e7f-0b2f-48f9-8b7e-c86614b06f41"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:09:54 crc kubenswrapper[4651]: I1011 05:09:54.489461 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/899b7e7f-0b2f-48f9-8b7e-c86614b06f41-kube-api-access-jr2jr" (OuterVolumeSpecName: "kube-api-access-jr2jr") pod "899b7e7f-0b2f-48f9-8b7e-c86614b06f41" (UID: "899b7e7f-0b2f-48f9-8b7e-c86614b06f41"). InnerVolumeSpecName "kube-api-access-jr2jr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:09:54 crc kubenswrapper[4651]: I1011 05:09:54.508667 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/899b7e7f-0b2f-48f9-8b7e-c86614b06f41-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "899b7e7f-0b2f-48f9-8b7e-c86614b06f41" (UID: "899b7e7f-0b2f-48f9-8b7e-c86614b06f41"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:09:54 crc kubenswrapper[4651]: I1011 05:09:54.513156 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/899b7e7f-0b2f-48f9-8b7e-c86614b06f41-config-data" (OuterVolumeSpecName: "config-data") pod "899b7e7f-0b2f-48f9-8b7e-c86614b06f41" (UID: "899b7e7f-0b2f-48f9-8b7e-c86614b06f41"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:09:54 crc kubenswrapper[4651]: I1011 05:09:54.585092 4651 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/899b7e7f-0b2f-48f9-8b7e-c86614b06f41-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:54 crc kubenswrapper[4651]: I1011 05:09:54.585118 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/899b7e7f-0b2f-48f9-8b7e-c86614b06f41-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:54 crc kubenswrapper[4651]: I1011 05:09:54.585129 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/899b7e7f-0b2f-48f9-8b7e-c86614b06f41-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:54 crc kubenswrapper[4651]: I1011 05:09:54.585140 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jr2jr\" (UniqueName: \"kubernetes.io/projected/899b7e7f-0b2f-48f9-8b7e-c86614b06f41-kube-api-access-jr2jr\") on node \"crc\" DevicePath \"\"" Oct 11 05:09:54 crc kubenswrapper[4651]: I1011 05:09:54.960150 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-464ng" event={"ID":"899b7e7f-0b2f-48f9-8b7e-c86614b06f41","Type":"ContainerDied","Data":"52085ce43c98d8fac55e78150a6ef2736ed2a296b486825b7b615c9a15bcae9d"} Oct 11 05:09:54 crc kubenswrapper[4651]: I1011 05:09:54.960720 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="52085ce43c98d8fac55e78150a6ef2736ed2a296b486825b7b615c9a15bcae9d" Oct 11 05:09:54 crc kubenswrapper[4651]: I1011 05:09:54.960242 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-464ng" Oct 11 05:09:55 crc kubenswrapper[4651]: I1011 05:09:55.047025 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 11 05:09:55 crc kubenswrapper[4651]: E1011 05:09:55.047683 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="899b7e7f-0b2f-48f9-8b7e-c86614b06f41" containerName="nova-cell0-conductor-db-sync" Oct 11 05:09:55 crc kubenswrapper[4651]: I1011 05:09:55.047770 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="899b7e7f-0b2f-48f9-8b7e-c86614b06f41" containerName="nova-cell0-conductor-db-sync" Oct 11 05:09:55 crc kubenswrapper[4651]: I1011 05:09:55.048094 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="899b7e7f-0b2f-48f9-8b7e-c86614b06f41" containerName="nova-cell0-conductor-db-sync" Oct 11 05:09:55 crc kubenswrapper[4651]: I1011 05:09:55.048777 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 11 05:09:55 crc kubenswrapper[4651]: I1011 05:09:55.050692 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-tcrp8" Oct 11 05:09:55 crc kubenswrapper[4651]: I1011 05:09:55.051457 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 11 05:09:55 crc kubenswrapper[4651]: I1011 05:09:55.063353 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 11 05:09:55 crc kubenswrapper[4651]: I1011 05:09:55.193314 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a57592b-84ea-4d7c-ae5d-fcb7c009cbb0-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"7a57592b-84ea-4d7c-ae5d-fcb7c009cbb0\") " pod="openstack/nova-cell0-conductor-0" Oct 11 05:09:55 crc kubenswrapper[4651]: I1011 05:09:55.193597 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a57592b-84ea-4d7c-ae5d-fcb7c009cbb0-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"7a57592b-84ea-4d7c-ae5d-fcb7c009cbb0\") " pod="openstack/nova-cell0-conductor-0" Oct 11 05:09:55 crc kubenswrapper[4651]: I1011 05:09:55.193745 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6g92\" (UniqueName: \"kubernetes.io/projected/7a57592b-84ea-4d7c-ae5d-fcb7c009cbb0-kube-api-access-z6g92\") pod \"nova-cell0-conductor-0\" (UID: \"7a57592b-84ea-4d7c-ae5d-fcb7c009cbb0\") " pod="openstack/nova-cell0-conductor-0" Oct 11 05:09:55 crc kubenswrapper[4651]: I1011 05:09:55.295843 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6g92\" (UniqueName: \"kubernetes.io/projected/7a57592b-84ea-4d7c-ae5d-fcb7c009cbb0-kube-api-access-z6g92\") pod \"nova-cell0-conductor-0\" (UID: \"7a57592b-84ea-4d7c-ae5d-fcb7c009cbb0\") " pod="openstack/nova-cell0-conductor-0" Oct 11 05:09:55 crc kubenswrapper[4651]: I1011 05:09:55.296077 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a57592b-84ea-4d7c-ae5d-fcb7c009cbb0-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"7a57592b-84ea-4d7c-ae5d-fcb7c009cbb0\") " pod="openstack/nova-cell0-conductor-0" Oct 11 05:09:55 crc kubenswrapper[4651]: I1011 05:09:55.296123 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a57592b-84ea-4d7c-ae5d-fcb7c009cbb0-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"7a57592b-84ea-4d7c-ae5d-fcb7c009cbb0\") " pod="openstack/nova-cell0-conductor-0" Oct 11 05:09:55 crc kubenswrapper[4651]: I1011 05:09:55.307234 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a57592b-84ea-4d7c-ae5d-fcb7c009cbb0-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"7a57592b-84ea-4d7c-ae5d-fcb7c009cbb0\") " pod="openstack/nova-cell0-conductor-0" Oct 11 05:09:55 crc kubenswrapper[4651]: I1011 05:09:55.309524 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a57592b-84ea-4d7c-ae5d-fcb7c009cbb0-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" 
(UID: \"7a57592b-84ea-4d7c-ae5d-fcb7c009cbb0\") " pod="openstack/nova-cell0-conductor-0" Oct 11 05:09:55 crc kubenswrapper[4651]: I1011 05:09:55.324870 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6g92\" (UniqueName: \"kubernetes.io/projected/7a57592b-84ea-4d7c-ae5d-fcb7c009cbb0-kube-api-access-z6g92\") pod \"nova-cell0-conductor-0\" (UID: \"7a57592b-84ea-4d7c-ae5d-fcb7c009cbb0\") " pod="openstack/nova-cell0-conductor-0" Oct 11 05:09:55 crc kubenswrapper[4651]: I1011 05:09:55.363231 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 11 05:09:55 crc kubenswrapper[4651]: I1011 05:09:55.812667 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 11 05:09:55 crc kubenswrapper[4651]: I1011 05:09:55.972915 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"7a57592b-84ea-4d7c-ae5d-fcb7c009cbb0","Type":"ContainerStarted","Data":"43a27c980988454e227b63260a5c5bbbaa17e84cac10591723d076d0810f8cef"} Oct 11 05:09:56 crc kubenswrapper[4651]: I1011 05:09:56.982846 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"7a57592b-84ea-4d7c-ae5d-fcb7c009cbb0","Type":"ContainerStarted","Data":"c6cd44ae30a10d8b5235288c2180bb1e62e471c73371dd36412a38d73da9c8b0"} Oct 11 05:09:56 crc kubenswrapper[4651]: I1011 05:09:56.984086 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Oct 11 05:09:57 crc kubenswrapper[4651]: I1011 05:09:57.012114 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.012094878 podStartE2EDuration="2.012094878s" podCreationTimestamp="2025-10-11 05:09:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:09:57.003405191 +0000 UTC m=+1117.899638007" watchObservedRunningTime="2025-10-11 05:09:57.012094878 +0000 UTC m=+1117.908327674" Oct 11 05:10:05 crc kubenswrapper[4651]: I1011 05:10:05.412784 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Oct 11 05:10:05 crc kubenswrapper[4651]: I1011 05:10:05.882639 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-prppv"] Oct 11 05:10:05 crc kubenswrapper[4651]: I1011 05:10:05.883677 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-prppv"] Oct 11 05:10:05 crc kubenswrapper[4651]: I1011 05:10:05.883751 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-prppv" Oct 11 05:10:05 crc kubenswrapper[4651]: I1011 05:10:05.886671 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Oct 11 05:10:05 crc kubenswrapper[4651]: I1011 05:10:05.886684 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.006644 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a6ed193-5ad3-4f79-aa3d-d35033be1f21-config-data\") pod \"nova-cell0-cell-mapping-prppv\" (UID: \"5a6ed193-5ad3-4f79-aa3d-d35033be1f21\") " pod="openstack/nova-cell0-cell-mapping-prppv" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.007034 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2nhp\" (UniqueName: \"kubernetes.io/projected/5a6ed193-5ad3-4f79-aa3d-d35033be1f21-kube-api-access-q2nhp\") pod \"nova-cell0-cell-mapping-prppv\" (UID: \"5a6ed193-5ad3-4f79-aa3d-d35033be1f21\") " pod="openstack/nova-cell0-cell-mapping-prppv" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.007091 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a6ed193-5ad3-4f79-aa3d-d35033be1f21-scripts\") pod \"nova-cell0-cell-mapping-prppv\" (UID: \"5a6ed193-5ad3-4f79-aa3d-d35033be1f21\") " pod="openstack/nova-cell0-cell-mapping-prppv" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.007421 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a6ed193-5ad3-4f79-aa3d-d35033be1f21-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-prppv\" (UID: \"5a6ed193-5ad3-4f79-aa3d-d35033be1f21\") " pod="openstack/nova-cell0-cell-mapping-prppv" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.064007 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.065222 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.068960 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.084944 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.109128 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a6ed193-5ad3-4f79-aa3d-d35033be1f21-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-prppv\" (UID: \"5a6ed193-5ad3-4f79-aa3d-d35033be1f21\") " pod="openstack/nova-cell0-cell-mapping-prppv" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.109222 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a6ed193-5ad3-4f79-aa3d-d35033be1f21-config-data\") pod \"nova-cell0-cell-mapping-prppv\" (UID: \"5a6ed193-5ad3-4f79-aa3d-d35033be1f21\") " pod="openstack/nova-cell0-cell-mapping-prppv" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.109254 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2nhp\" (UniqueName: \"kubernetes.io/projected/5a6ed193-5ad3-4f79-aa3d-d35033be1f21-kube-api-access-q2nhp\") pod \"nova-cell0-cell-mapping-prppv\" (UID: \"5a6ed193-5ad3-4f79-aa3d-d35033be1f21\") " pod="openstack/nova-cell0-cell-mapping-prppv" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.109273 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a6ed193-5ad3-4f79-aa3d-d35033be1f21-scripts\") pod \"nova-cell0-cell-mapping-prppv\" (UID: \"5a6ed193-5ad3-4f79-aa3d-d35033be1f21\") " pod="openstack/nova-cell0-cell-mapping-prppv" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.115793 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a6ed193-5ad3-4f79-aa3d-d35033be1f21-scripts\") pod \"nova-cell0-cell-mapping-prppv\" (UID: \"5a6ed193-5ad3-4f79-aa3d-d35033be1f21\") " pod="openstack/nova-cell0-cell-mapping-prppv" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.122837 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a6ed193-5ad3-4f79-aa3d-d35033be1f21-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-prppv\" (UID: \"5a6ed193-5ad3-4f79-aa3d-d35033be1f21\") " pod="openstack/nova-cell0-cell-mapping-prppv" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.128438 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a6ed193-5ad3-4f79-aa3d-d35033be1f21-config-data\") pod \"nova-cell0-cell-mapping-prppv\" (UID: \"5a6ed193-5ad3-4f79-aa3d-d35033be1f21\") " pod="openstack/nova-cell0-cell-mapping-prppv" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.136722 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.138284 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.138862 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2nhp\" (UniqueName: \"kubernetes.io/projected/5a6ed193-5ad3-4f79-aa3d-d35033be1f21-kube-api-access-q2nhp\") pod \"nova-cell0-cell-mapping-prppv\" (UID: \"5a6ed193-5ad3-4f79-aa3d-d35033be1f21\") " pod="openstack/nova-cell0-cell-mapping-prppv" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.140734 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.151830 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.210973 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wq47\" (UniqueName: \"kubernetes.io/projected/8432ef3d-65ef-4602-a0b6-d79d9ae9a73b-kube-api-access-2wq47\") pod \"nova-scheduler-0\" (UID: \"8432ef3d-65ef-4602-a0b6-d79d9ae9a73b\") " pod="openstack/nova-scheduler-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.211045 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8432ef3d-65ef-4602-a0b6-d79d9ae9a73b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8432ef3d-65ef-4602-a0b6-d79d9ae9a73b\") " pod="openstack/nova-scheduler-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.211069 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8432ef3d-65ef-4602-a0b6-d79d9ae9a73b-config-data\") pod \"nova-scheduler-0\" (UID: \"8432ef3d-65ef-4602-a0b6-d79d9ae9a73b\") " pod="openstack/nova-scheduler-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.219943 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-prppv" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.270291 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.279711 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.281049 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.281436 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.282566 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.289902 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.297026 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.312331 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.314812 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8a5d8d3c-139c-4d8b-94ec-38316a4534d3-logs\") pod \"nova-api-0\" (UID: \"8a5d8d3c-139c-4d8b-94ec-38316a4534d3\") " pod="openstack/nova-api-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.314940 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wq47\" (UniqueName: \"kubernetes.io/projected/8432ef3d-65ef-4602-a0b6-d79d9ae9a73b-kube-api-access-2wq47\") pod \"nova-scheduler-0\" (UID: \"8432ef3d-65ef-4602-a0b6-d79d9ae9a73b\") " pod="openstack/nova-scheduler-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.314977 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a5d8d3c-139c-4d8b-94ec-38316a4534d3-config-data\") pod \"nova-api-0\" (UID: \"8a5d8d3c-139c-4d8b-94ec-38316a4534d3\") " pod="openstack/nova-api-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.315013 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8432ef3d-65ef-4602-a0b6-d79d9ae9a73b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8432ef3d-65ef-4602-a0b6-d79d9ae9a73b\") " pod="openstack/nova-scheduler-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.315030 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwmhg\" (UniqueName: \"kubernetes.io/projected/8a5d8d3c-139c-4d8b-94ec-38316a4534d3-kube-api-access-pwmhg\") pod \"nova-api-0\" (UID: \"8a5d8d3c-139c-4d8b-94ec-38316a4534d3\") " pod="openstack/nova-api-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.315052 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8432ef3d-65ef-4602-a0b6-d79d9ae9a73b-config-data\") pod \"nova-scheduler-0\" (UID: \"8432ef3d-65ef-4602-a0b6-d79d9ae9a73b\") " pod="openstack/nova-scheduler-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.315093 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a5d8d3c-139c-4d8b-94ec-38316a4534d3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8a5d8d3c-139c-4d8b-94ec-38316a4534d3\") " pod="openstack/nova-api-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.318530 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8432ef3d-65ef-4602-a0b6-d79d9ae9a73b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8432ef3d-65ef-4602-a0b6-d79d9ae9a73b\") " pod="openstack/nova-scheduler-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.339634 4651 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8432ef3d-65ef-4602-a0b6-d79d9ae9a73b-config-data\") pod \"nova-scheduler-0\" (UID: \"8432ef3d-65ef-4602-a0b6-d79d9ae9a73b\") " pod="openstack/nova-scheduler-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.346305 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wq47\" (UniqueName: \"kubernetes.io/projected/8432ef3d-65ef-4602-a0b6-d79d9ae9a73b-kube-api-access-2wq47\") pod \"nova-scheduler-0\" (UID: \"8432ef3d-65ef-4602-a0b6-d79d9ae9a73b\") " pod="openstack/nova-scheduler-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.386489 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.409939 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-hjq47"] Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.411548 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-hjq47" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.417350 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a5d8d3c-139c-4d8b-94ec-38316a4534d3-config-data\") pod \"nova-api-0\" (UID: \"8a5d8d3c-139c-4d8b-94ec-38316a4534d3\") " pod="openstack/nova-api-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.417410 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmzvg\" (UniqueName: \"kubernetes.io/projected/bce169c0-d36f-4fd1-b42f-9e2b481789b1-kube-api-access-dmzvg\") pod \"nova-cell1-novncproxy-0\" (UID: \"bce169c0-d36f-4fd1-b42f-9e2b481789b1\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.417432 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwmhg\" (UniqueName: \"kubernetes.io/projected/8a5d8d3c-139c-4d8b-94ec-38316a4534d3-kube-api-access-pwmhg\") pod \"nova-api-0\" (UID: \"8a5d8d3c-139c-4d8b-94ec-38316a4534d3\") " pod="openstack/nova-api-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.417463 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bc83296-49f0-4236-89d7-c430b78d15f1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7bc83296-49f0-4236-89d7-c430b78d15f1\") " pod="openstack/nova-metadata-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.417497 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a5d8d3c-139c-4d8b-94ec-38316a4534d3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8a5d8d3c-139c-4d8b-94ec-38316a4534d3\") " pod="openstack/nova-api-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.417514 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7bc83296-49f0-4236-89d7-c430b78d15f1-logs\") pod \"nova-metadata-0\" (UID: \"7bc83296-49f0-4236-89d7-c430b78d15f1\") " pod="openstack/nova-metadata-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.417535 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bc83296-49f0-4236-89d7-c430b78d15f1-config-data\") pod \"nova-metadata-0\" (UID: \"7bc83296-49f0-4236-89d7-c430b78d15f1\") " pod="openstack/nova-metadata-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.417550 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bce169c0-d36f-4fd1-b42f-9e2b481789b1-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"bce169c0-d36f-4fd1-b42f-9e2b481789b1\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.417564 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8a5d8d3c-139c-4d8b-94ec-38316a4534d3-logs\") pod \"nova-api-0\" (UID: \"8a5d8d3c-139c-4d8b-94ec-38316a4534d3\") " pod="openstack/nova-api-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.417580 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bce169c0-d36f-4fd1-b42f-9e2b481789b1-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"bce169c0-d36f-4fd1-b42f-9e2b481789b1\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.417623 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sb7bn\" (UniqueName: \"kubernetes.io/projected/7bc83296-49f0-4236-89d7-c430b78d15f1-kube-api-access-sb7bn\") pod \"nova-metadata-0\" (UID: \"7bc83296-49f0-4236-89d7-c430b78d15f1\") " pod="openstack/nova-metadata-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.420236 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8a5d8d3c-139c-4d8b-94ec-38316a4534d3-logs\") pod \"nova-api-0\" (UID: \"8a5d8d3c-139c-4d8b-94ec-38316a4534d3\") " pod="openstack/nova-api-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.420974 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a5d8d3c-139c-4d8b-94ec-38316a4534d3-config-data\") pod \"nova-api-0\" (UID: \"8a5d8d3c-139c-4d8b-94ec-38316a4534d3\") " pod="openstack/nova-api-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.433562 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-hjq47"] Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.443525 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a5d8d3c-139c-4d8b-94ec-38316a4534d3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8a5d8d3c-139c-4d8b-94ec-38316a4534d3\") " pod="openstack/nova-api-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.462244 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwmhg\" (UniqueName: \"kubernetes.io/projected/8a5d8d3c-139c-4d8b-94ec-38316a4534d3-kube-api-access-pwmhg\") pod \"nova-api-0\" (UID: \"8a5d8d3c-139c-4d8b-94ec-38316a4534d3\") " pod="openstack/nova-api-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.519428 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bc83296-49f0-4236-89d7-c430b78d15f1-config-data\") pod \"nova-metadata-0\" (UID: 
\"7bc83296-49f0-4236-89d7-c430b78d15f1\") " pod="openstack/nova-metadata-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.519491 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bce169c0-d36f-4fd1-b42f-9e2b481789b1-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"bce169c0-d36f-4fd1-b42f-9e2b481789b1\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.519521 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bce169c0-d36f-4fd1-b42f-9e2b481789b1-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"bce169c0-d36f-4fd1-b42f-9e2b481789b1\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.519595 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sb7bn\" (UniqueName: \"kubernetes.io/projected/7bc83296-49f0-4236-89d7-c430b78d15f1-kube-api-access-sb7bn\") pod \"nova-metadata-0\" (UID: \"7bc83296-49f0-4236-89d7-c430b78d15f1\") " pod="openstack/nova-metadata-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.519678 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a4717124-cc6a-4961-9828-a89d0132ba8a-dns-swift-storage-0\") pod \"dnsmasq-dns-865f5d856f-hjq47\" (UID: \"a4717124-cc6a-4961-9828-a89d0132ba8a\") " pod="openstack/dnsmasq-dns-865f5d856f-hjq47" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.519715 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a4717124-cc6a-4961-9828-a89d0132ba8a-config\") pod \"dnsmasq-dns-865f5d856f-hjq47\" (UID: \"a4717124-cc6a-4961-9828-a89d0132ba8a\") " pod="openstack/dnsmasq-dns-865f5d856f-hjq47" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.519735 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a4717124-cc6a-4961-9828-a89d0132ba8a-dns-svc\") pod \"dnsmasq-dns-865f5d856f-hjq47\" (UID: \"a4717124-cc6a-4961-9828-a89d0132ba8a\") " pod="openstack/dnsmasq-dns-865f5d856f-hjq47" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.519759 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xplw\" (UniqueName: \"kubernetes.io/projected/a4717124-cc6a-4961-9828-a89d0132ba8a-kube-api-access-9xplw\") pod \"dnsmasq-dns-865f5d856f-hjq47\" (UID: \"a4717124-cc6a-4961-9828-a89d0132ba8a\") " pod="openstack/dnsmasq-dns-865f5d856f-hjq47" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.519799 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a4717124-cc6a-4961-9828-a89d0132ba8a-ovsdbserver-sb\") pod \"dnsmasq-dns-865f5d856f-hjq47\" (UID: \"a4717124-cc6a-4961-9828-a89d0132ba8a\") " pod="openstack/dnsmasq-dns-865f5d856f-hjq47" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.519857 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmzvg\" (UniqueName: \"kubernetes.io/projected/bce169c0-d36f-4fd1-b42f-9e2b481789b1-kube-api-access-dmzvg\") pod \"nova-cell1-novncproxy-0\" (UID: 
\"bce169c0-d36f-4fd1-b42f-9e2b481789b1\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.519907 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bc83296-49f0-4236-89d7-c430b78d15f1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7bc83296-49f0-4236-89d7-c430b78d15f1\") " pod="openstack/nova-metadata-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.519936 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a4717124-cc6a-4961-9828-a89d0132ba8a-ovsdbserver-nb\") pod \"dnsmasq-dns-865f5d856f-hjq47\" (UID: \"a4717124-cc6a-4961-9828-a89d0132ba8a\") " pod="openstack/dnsmasq-dns-865f5d856f-hjq47" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.519977 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7bc83296-49f0-4236-89d7-c430b78d15f1-logs\") pod \"nova-metadata-0\" (UID: \"7bc83296-49f0-4236-89d7-c430b78d15f1\") " pod="openstack/nova-metadata-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.526005 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7bc83296-49f0-4236-89d7-c430b78d15f1-logs\") pod \"nova-metadata-0\" (UID: \"7bc83296-49f0-4236-89d7-c430b78d15f1\") " pod="openstack/nova-metadata-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.528962 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bc83296-49f0-4236-89d7-c430b78d15f1-config-data\") pod \"nova-metadata-0\" (UID: \"7bc83296-49f0-4236-89d7-c430b78d15f1\") " pod="openstack/nova-metadata-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.528984 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bce169c0-d36f-4fd1-b42f-9e2b481789b1-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"bce169c0-d36f-4fd1-b42f-9e2b481789b1\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.529158 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bce169c0-d36f-4fd1-b42f-9e2b481789b1-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"bce169c0-d36f-4fd1-b42f-9e2b481789b1\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.529268 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bc83296-49f0-4236-89d7-c430b78d15f1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7bc83296-49f0-4236-89d7-c430b78d15f1\") " pod="openstack/nova-metadata-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.534729 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.543442 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sb7bn\" (UniqueName: \"kubernetes.io/projected/7bc83296-49f0-4236-89d7-c430b78d15f1-kube-api-access-sb7bn\") pod \"nova-metadata-0\" (UID: \"7bc83296-49f0-4236-89d7-c430b78d15f1\") " pod="openstack/nova-metadata-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.551593 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmzvg\" (UniqueName: \"kubernetes.io/projected/bce169c0-d36f-4fd1-b42f-9e2b481789b1-kube-api-access-dmzvg\") pod \"nova-cell1-novncproxy-0\" (UID: \"bce169c0-d36f-4fd1-b42f-9e2b481789b1\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.621649 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a4717124-cc6a-4961-9828-a89d0132ba8a-dns-swift-storage-0\") pod \"dnsmasq-dns-865f5d856f-hjq47\" (UID: \"a4717124-cc6a-4961-9828-a89d0132ba8a\") " pod="openstack/dnsmasq-dns-865f5d856f-hjq47" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.621715 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a4717124-cc6a-4961-9828-a89d0132ba8a-config\") pod \"dnsmasq-dns-865f5d856f-hjq47\" (UID: \"a4717124-cc6a-4961-9828-a89d0132ba8a\") " pod="openstack/dnsmasq-dns-865f5d856f-hjq47" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.621740 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a4717124-cc6a-4961-9828-a89d0132ba8a-dns-svc\") pod \"dnsmasq-dns-865f5d856f-hjq47\" (UID: \"a4717124-cc6a-4961-9828-a89d0132ba8a\") " pod="openstack/dnsmasq-dns-865f5d856f-hjq47" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.621762 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xplw\" (UniqueName: \"kubernetes.io/projected/a4717124-cc6a-4961-9828-a89d0132ba8a-kube-api-access-9xplw\") pod \"dnsmasq-dns-865f5d856f-hjq47\" (UID: \"a4717124-cc6a-4961-9828-a89d0132ba8a\") " pod="openstack/dnsmasq-dns-865f5d856f-hjq47" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.621801 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a4717124-cc6a-4961-9828-a89d0132ba8a-ovsdbserver-sb\") pod \"dnsmasq-dns-865f5d856f-hjq47\" (UID: \"a4717124-cc6a-4961-9828-a89d0132ba8a\") " pod="openstack/dnsmasq-dns-865f5d856f-hjq47" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.621877 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a4717124-cc6a-4961-9828-a89d0132ba8a-ovsdbserver-nb\") pod \"dnsmasq-dns-865f5d856f-hjq47\" (UID: \"a4717124-cc6a-4961-9828-a89d0132ba8a\") " pod="openstack/dnsmasq-dns-865f5d856f-hjq47" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.624355 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a4717124-cc6a-4961-9828-a89d0132ba8a-dns-swift-storage-0\") pod \"dnsmasq-dns-865f5d856f-hjq47\" (UID: \"a4717124-cc6a-4961-9828-a89d0132ba8a\") " pod="openstack/dnsmasq-dns-865f5d856f-hjq47" Oct 11 05:10:06 crc 
kubenswrapper[4651]: I1011 05:10:06.624658 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a4717124-cc6a-4961-9828-a89d0132ba8a-ovsdbserver-nb\") pod \"dnsmasq-dns-865f5d856f-hjq47\" (UID: \"a4717124-cc6a-4961-9828-a89d0132ba8a\") " pod="openstack/dnsmasq-dns-865f5d856f-hjq47" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.626712 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a4717124-cc6a-4961-9828-a89d0132ba8a-dns-svc\") pod \"dnsmasq-dns-865f5d856f-hjq47\" (UID: \"a4717124-cc6a-4961-9828-a89d0132ba8a\") " pod="openstack/dnsmasq-dns-865f5d856f-hjq47" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.627531 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a4717124-cc6a-4961-9828-a89d0132ba8a-ovsdbserver-sb\") pod \"dnsmasq-dns-865f5d856f-hjq47\" (UID: \"a4717124-cc6a-4961-9828-a89d0132ba8a\") " pod="openstack/dnsmasq-dns-865f5d856f-hjq47" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.627726 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a4717124-cc6a-4961-9828-a89d0132ba8a-config\") pod \"dnsmasq-dns-865f5d856f-hjq47\" (UID: \"a4717124-cc6a-4961-9828-a89d0132ba8a\") " pod="openstack/dnsmasq-dns-865f5d856f-hjq47" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.646368 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xplw\" (UniqueName: \"kubernetes.io/projected/a4717124-cc6a-4961-9828-a89d0132ba8a-kube-api-access-9xplw\") pod \"dnsmasq-dns-865f5d856f-hjq47\" (UID: \"a4717124-cc6a-4961-9828-a89d0132ba8a\") " pod="openstack/dnsmasq-dns-865f5d856f-hjq47" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.707272 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.716198 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-prppv"] Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.719650 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 11 05:10:06 crc kubenswrapper[4651]: W1011 05:10:06.737841 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5a6ed193_5ad3_4f79_aa3d_d35033be1f21.slice/crio-0f576ecdb7edacb1abeee160cbf899dc874db6544fb54532b75558087638b6b1 WatchSource:0}: Error finding container 0f576ecdb7edacb1abeee160cbf899dc874db6544fb54532b75558087638b6b1: Status 404 returned error can't find the container with id 0f576ecdb7edacb1abeee160cbf899dc874db6544fb54532b75558087638b6b1 Oct 11 05:10:06 crc kubenswrapper[4651]: I1011 05:10:06.767419 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-hjq47" Oct 11 05:10:07 crc kubenswrapper[4651]: I1011 05:10:07.017281 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 11 05:10:07 crc kubenswrapper[4651]: I1011 05:10:07.046886 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-r47rs"] Oct 11 05:10:07 crc kubenswrapper[4651]: I1011 05:10:07.048839 4651 util.go:30] "No sandbox for pod can be found. 
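The volume records above trace the kubelet's two-phase flow for each desired volume: reconciler_common.go:245 logs the controller-attach verification, reconciler_common.go:218 logs the start of the mount, and operation_generator.go:637 logs MountVolume.SetUp success. A minimal Go sketch of that verify-then-mount loop; this is an illustrative reduction with stub functions, not the kubelet's actual reconciler:

    package main

    import "fmt"

    type volume struct{ name, plugin, podUID string }

    // Stubs standing in for the controller-attach check and the plugin SetUp
    // (e.g. materializing a secret or projected volume on disk).
    func verifyAttached(v volume) error { return nil }
    func setUp(v volume) error          { return nil }

    func reconcile(desired []volume) {
        for _, v := range desired {
            // analogue of reconciler_common.go:245
            if err := verifyAttached(v); err != nil {
                fmt.Printf("verify failed for %q: %v\n", v.name, err)
                continue
            }
            // analogue of reconciler_common.go:218 / operation_generator.go:637
            if err := setUp(v); err != nil {
                fmt.Printf("MountVolume.SetUp failed for %q: %v\n", v.name, err)
                continue
            }
            fmt.Printf("MountVolume.SetUp succeeded for volume %q (pod %s)\n", v.name, v.podUID)
        }
    }

    func main() {
        reconcile([]volume{
            {"config-data", "kubernetes.io/secret", "8432ef3d-65ef-4602-a0b6-d79d9ae9a73b"},
            {"kube-api-access-2wq47", "kubernetes.io/projected", "8432ef3d-65ef-4602-a0b6-d79d9ae9a73b"},
        })
    }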
Oct 11 05:10:07 crc kubenswrapper[4651]: I1011 05:10:07.051709 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Oct 11 05:10:07 crc kubenswrapper[4651]: I1011 05:10:07.052058 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts"
Oct 11 05:10:07 crc kubenswrapper[4651]: I1011 05:10:07.079961 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-r47rs"]
Oct 11 05:10:07 crc kubenswrapper[4651]: I1011 05:10:07.124479 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Oct 11 05:10:07 crc kubenswrapper[4651]: I1011 05:10:07.179651 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-prppv" event={"ID":"5a6ed193-5ad3-4f79-aa3d-d35033be1f21","Type":"ContainerStarted","Data":"c7c739f99e77a0286e117cc409a94b977ca85e1f2f58b286780045ba328fa488"}
Oct 11 05:10:07 crc kubenswrapper[4651]: I1011 05:10:07.179694 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-prppv" event={"ID":"5a6ed193-5ad3-4f79-aa3d-d35033be1f21","Type":"ContainerStarted","Data":"0f576ecdb7edacb1abeee160cbf899dc874db6544fb54532b75558087638b6b1"}
Oct 11 05:10:07 crc kubenswrapper[4651]: I1011 05:10:07.182428 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8432ef3d-65ef-4602-a0b6-d79d9ae9a73b","Type":"ContainerStarted","Data":"d1f56bb552021ca02540c943f0b23f33975b942c3207974195efa785a7149293"}
Oct 11 05:10:07 crc kubenswrapper[4651]: I1011 05:10:07.211263 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-prppv" podStartSLOduration=2.211245113 podStartE2EDuration="2.211245113s" podCreationTimestamp="2025-10-11 05:10:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:10:07.194633709 +0000 UTC m=+1128.090866505" watchObservedRunningTime="2025-10-11 05:10:07.211245113 +0000 UTC m=+1128.107477909"
Oct 11 05:10:07 crc kubenswrapper[4651]: I1011 05:10:07.251017 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dt4ns\" (UniqueName: \"kubernetes.io/projected/0a3cd96c-5270-46d9-befe-b18a3467ddde-kube-api-access-dt4ns\") pod \"nova-cell1-conductor-db-sync-r47rs\" (UID: \"0a3cd96c-5270-46d9-befe-b18a3467ddde\") " pod="openstack/nova-cell1-conductor-db-sync-r47rs"
Oct 11 05:10:07 crc kubenswrapper[4651]: I1011 05:10:07.251444 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a3cd96c-5270-46d9-befe-b18a3467ddde-scripts\") pod \"nova-cell1-conductor-db-sync-r47rs\" (UID: \"0a3cd96c-5270-46d9-befe-b18a3467ddde\") " pod="openstack/nova-cell1-conductor-db-sync-r47rs"
Oct 11 05:10:07 crc kubenswrapper[4651]: I1011 05:10:07.251541 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a3cd96c-5270-46d9-befe-b18a3467ddde-config-data\") pod \"nova-cell1-conductor-db-sync-r47rs\" (UID: \"0a3cd96c-5270-46d9-befe-b18a3467ddde\") " pod="openstack/nova-cell1-conductor-db-sync-r47rs"
Oct 11 05:10:07 crc kubenswrapper[4651]: I1011 05:10:07.251616 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a3cd96c-5270-46d9-befe-b18a3467ddde-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-r47rs\" (UID: \"0a3cd96c-5270-46d9-befe-b18a3467ddde\") " pod="openstack/nova-cell1-conductor-db-sync-r47rs"
Oct 11 05:10:07 crc kubenswrapper[4651]: I1011 05:10:07.256386 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Oct 11 05:10:07 crc kubenswrapper[4651]: W1011 05:10:07.271845 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbce169c0_d36f_4fd1_b42f_9e2b481789b1.slice/crio-89df9807363cf0b8af55d3063e00cf3dd65f618be0ef5da3e2259c5a65fb2dd5 WatchSource:0}: Error finding container 89df9807363cf0b8af55d3063e00cf3dd65f618be0ef5da3e2259c5a65fb2dd5: Status 404 returned error can't find the container with id 89df9807363cf0b8af55d3063e00cf3dd65f618be0ef5da3e2259c5a65fb2dd5
Oct 11 05:10:07 crc kubenswrapper[4651]: I1011 05:10:07.352740 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dt4ns\" (UniqueName: \"kubernetes.io/projected/0a3cd96c-5270-46d9-befe-b18a3467ddde-kube-api-access-dt4ns\") pod \"nova-cell1-conductor-db-sync-r47rs\" (UID: \"0a3cd96c-5270-46d9-befe-b18a3467ddde\") " pod="openstack/nova-cell1-conductor-db-sync-r47rs"
Oct 11 05:10:07 crc kubenswrapper[4651]: I1011 05:10:07.352884 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a3cd96c-5270-46d9-befe-b18a3467ddde-scripts\") pod \"nova-cell1-conductor-db-sync-r47rs\" (UID: \"0a3cd96c-5270-46d9-befe-b18a3467ddde\") " pod="openstack/nova-cell1-conductor-db-sync-r47rs"
Oct 11 05:10:07 crc kubenswrapper[4651]: I1011 05:10:07.352910 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a3cd96c-5270-46d9-befe-b18a3467ddde-config-data\") pod \"nova-cell1-conductor-db-sync-r47rs\" (UID: \"0a3cd96c-5270-46d9-befe-b18a3467ddde\") " pod="openstack/nova-cell1-conductor-db-sync-r47rs"
Oct 11 05:10:07 crc kubenswrapper[4651]: I1011 05:10:07.352926 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a3cd96c-5270-46d9-befe-b18a3467ddde-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-r47rs\" (UID: \"0a3cd96c-5270-46d9-befe-b18a3467ddde\") " pod="openstack/nova-cell1-conductor-db-sync-r47rs"
Oct 11 05:10:07 crc kubenswrapper[4651]: I1011 05:10:07.358719 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a3cd96c-5270-46d9-befe-b18a3467ddde-scripts\") pod \"nova-cell1-conductor-db-sync-r47rs\" (UID: \"0a3cd96c-5270-46d9-befe-b18a3467ddde\") " pod="openstack/nova-cell1-conductor-db-sync-r47rs"
Oct 11 05:10:07 crc kubenswrapper[4651]: I1011 05:10:07.358857 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a3cd96c-5270-46d9-befe-b18a3467ddde-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-r47rs\" (UID: \"0a3cd96c-5270-46d9-befe-b18a3467ddde\") " pod="openstack/nova-cell1-conductor-db-sync-r47rs"
Oct 11 05:10:07 crc kubenswrapper[4651]: I1011 05:10:07.358996 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a3cd96c-5270-46d9-befe-b18a3467ddde-config-data\") pod \"nova-cell1-conductor-db-sync-r47rs\" (UID: \"0a3cd96c-5270-46d9-befe-b18a3467ddde\") " pod="openstack/nova-cell1-conductor-db-sync-r47rs"
Oct 11 05:10:07 crc kubenswrapper[4651]: I1011 05:10:07.386341 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dt4ns\" (UniqueName: \"kubernetes.io/projected/0a3cd96c-5270-46d9-befe-b18a3467ddde-kube-api-access-dt4ns\") pod \"nova-cell1-conductor-db-sync-r47rs\" (UID: \"0a3cd96c-5270-46d9-befe-b18a3467ddde\") " pod="openstack/nova-cell1-conductor-db-sync-r47rs"
Oct 11 05:10:07 crc kubenswrapper[4651]: I1011 05:10:07.414434 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-hjq47"]
Oct 11 05:10:07 crc kubenswrapper[4651]: I1011 05:10:07.432791 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Oct 11 05:10:07 crc kubenswrapper[4651]: I1011 05:10:07.455469 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Oct 11 05:10:07 crc kubenswrapper[4651]: I1011 05:10:07.668605 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-r47rs"
Oct 11 05:10:08 crc kubenswrapper[4651]: I1011 05:10:08.129938 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-r47rs"]
Oct 11 05:10:08 crc kubenswrapper[4651]: I1011 05:10:08.204952 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-r47rs" event={"ID":"0a3cd96c-5270-46d9-befe-b18a3467ddde","Type":"ContainerStarted","Data":"091f19b58591e767dd01ec26d3de36877cdcc33037fd84227e9dd83ae7cda930"}
Oct 11 05:10:08 crc kubenswrapper[4651]: I1011 05:10:08.208501 4651 generic.go:334] "Generic (PLEG): container finished" podID="a4717124-cc6a-4961-9828-a89d0132ba8a" containerID="354dcd1cf4794b626e50f7f06cca8c34ffa19f2d2cf103ac857e4649bddf2064" exitCode=0
Oct 11 05:10:08 crc kubenswrapper[4651]: I1011 05:10:08.208573 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-hjq47" event={"ID":"a4717124-cc6a-4961-9828-a89d0132ba8a","Type":"ContainerDied","Data":"354dcd1cf4794b626e50f7f06cca8c34ffa19f2d2cf103ac857e4649bddf2064"}
Oct 11 05:10:08 crc kubenswrapper[4651]: I1011 05:10:08.208598 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-hjq47" event={"ID":"a4717124-cc6a-4961-9828-a89d0132ba8a","Type":"ContainerStarted","Data":"b210b9ff59e2bcb0220d8678523ea723919d74c57a05326f3a2eb1c9a3f5dc49"}
Oct 11 05:10:08 crc kubenswrapper[4651]: I1011 05:10:08.213485 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8a5d8d3c-139c-4d8b-94ec-38316a4534d3","Type":"ContainerStarted","Data":"bfe7e0694abab1761a2605212c7cebcb64e3a7c4ca67247ec7bc74b509a1ebcf"}
Oct 11 05:10:08 crc kubenswrapper[4651]: I1011 05:10:08.216279 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"bce169c0-d36f-4fd1-b42f-9e2b481789b1","Type":"ContainerStarted","Data":"89df9807363cf0b8af55d3063e00cf3dd65f618be0ef5da3e2259c5a65fb2dd5"}
Oct 11 05:10:08 crc kubenswrapper[4651]: I1011 05:10:08.218340 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7bc83296-49f0-4236-89d7-c430b78d15f1","Type":"ContainerStarted","Data":"f2c77a32f388ae0062e3671c6cfb9a1e43c7fd762a391ff675244ecb318f68eb"}
Oct 11 05:10:09 crc kubenswrapper[4651]: I1011 05:10:09.228654 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-r47rs" event={"ID":"0a3cd96c-5270-46d9-befe-b18a3467ddde","Type":"ContainerStarted","Data":"f0d5dd42154f318918654257dcf6c3f9e7191592a7221086692304cf8696c65e"}
Oct 11 05:10:09 crc kubenswrapper[4651]: I1011 05:10:09.231079 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-hjq47" event={"ID":"a4717124-cc6a-4961-9828-a89d0132ba8a","Type":"ContainerStarted","Data":"322b84020b523200bb81cab8d12535239913338f3b2748d01164b6fcfcbdba29"}
Oct 11 05:10:09 crc kubenswrapper[4651]: I1011 05:10:09.231341 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-865f5d856f-hjq47"
Oct 11 05:10:09 crc kubenswrapper[4651]: I1011 05:10:09.254046 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-r47rs" podStartSLOduration=2.254026225 podStartE2EDuration="2.254026225s" podCreationTimestamp="2025-10-11 05:10:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:10:09.241524964 +0000 UTC m=+1130.137757770" watchObservedRunningTime="2025-10-11 05:10:09.254026225 +0000 UTC m=+1130.150259031"
Oct 11 05:10:09 crc kubenswrapper[4651]: I1011 05:10:09.268543 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-865f5d856f-hjq47" podStartSLOduration=3.268524737 podStartE2EDuration="3.268524737s" podCreationTimestamp="2025-10-11 05:10:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:10:09.260454775 +0000 UTC m=+1130.156687571" watchObservedRunningTime="2025-10-11 05:10:09.268524737 +0000 UTC m=+1130.164757533"
Oct 11 05:10:09 crc kubenswrapper[4651]: I1011 05:10:09.919832 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Oct 11 05:10:09 crc kubenswrapper[4651]: I1011 05:10:09.925796 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Oct 11 05:10:11 crc kubenswrapper[4651]: I1011 05:10:11.250809 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8a5d8d3c-139c-4d8b-94ec-38316a4534d3","Type":"ContainerStarted","Data":"ba84dac6a4e13cc66539d24b28b518fdaa622f6434cd3e3f606907765d7286d9"}
Oct 11 05:10:11 crc kubenswrapper[4651]: I1011 05:10:11.252903 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8a5d8d3c-139c-4d8b-94ec-38316a4534d3","Type":"ContainerStarted","Data":"6a98d8d406b9633cc15a7fd907e9f68ce3d2f462fd619613d209a2cd067cf200"}
Oct 11 05:10:11 crc kubenswrapper[4651]: I1011 05:10:11.253967 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"bce169c0-d36f-4fd1-b42f-9e2b481789b1","Type":"ContainerStarted","Data":"3a3faff201d518822a82a72637f86b67c2e0f1f60f2c87badb99768d9390b5ec"}
Oct 11 05:10:11 crc kubenswrapper[4651]: I1011 05:10:11.254159 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="bce169c0-d36f-4fd1-b42f-9e2b481789b1" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://3a3faff201d518822a82a72637f86b67c2e0f1f60f2c87badb99768d9390b5ec" gracePeriod=30
Oct 11 05:10:11 crc kubenswrapper[4651]: I1011 05:10:11.257527 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7bc83296-49f0-4236-89d7-c430b78d15f1","Type":"ContainerStarted","Data":"eee392aacccb5204dc790abd04edc740c454ddc00e51fb814b736784f769f409"}
Oct 11 05:10:11 crc kubenswrapper[4651]: I1011 05:10:11.257750 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7bc83296-49f0-4236-89d7-c430b78d15f1","Type":"ContainerStarted","Data":"c9d0de5ecda628b62c5d643ddf27cf39fe05d386c3fe9109a47a4688e89f4639"}
Oct 11 05:10:11 crc kubenswrapper[4651]: I1011 05:10:11.257770 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="7bc83296-49f0-4236-89d7-c430b78d15f1" containerName="nova-metadata-metadata" containerID="cri-o://eee392aacccb5204dc790abd04edc740c454ddc00e51fb814b736784f769f409" gracePeriod=30
Oct 11 05:10:11 crc kubenswrapper[4651]: I1011 05:10:11.257688 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="7bc83296-49f0-4236-89d7-c430b78d15f1" containerName="nova-metadata-log" containerID="cri-o://c9d0de5ecda628b62c5d643ddf27cf39fe05d386c3fe9109a47a4688e89f4639" gracePeriod=30
Oct 11 05:10:11 crc kubenswrapper[4651]: I1011 05:10:11.262741 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8432ef3d-65ef-4602-a0b6-d79d9ae9a73b","Type":"ContainerStarted","Data":"77e5112d038240ad7b4f0dee7a7dbf0208e5696fc759677bdb99b1eef7e5d1fc"}
Oct 11 05:10:11 crc kubenswrapper[4651]: I1011 05:10:11.274271 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=1.918511089 podStartE2EDuration="5.274255196s" podCreationTimestamp="2025-10-11 05:10:06 +0000 UTC" firstStartedPulling="2025-10-11 05:10:07.177358289 +0000 UTC m=+1128.073591095" lastFinishedPulling="2025-10-11 05:10:10.533102406 +0000 UTC m=+1131.429335202" observedRunningTime="2025-10-11 05:10:11.267861406 +0000 UTC m=+1132.164094212" watchObservedRunningTime="2025-10-11 05:10:11.274255196 +0000 UTC m=+1132.170487992"
Oct 11 05:10:11 crc kubenswrapper[4651]: I1011 05:10:11.294152 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.174072009 podStartE2EDuration="5.294131001s" podCreationTimestamp="2025-10-11 05:10:06 +0000 UTC" firstStartedPulling="2025-10-11 05:10:07.416951902 +0000 UTC m=+1128.313184698" lastFinishedPulling="2025-10-11 05:10:10.537010894 +0000 UTC m=+1131.433243690" observedRunningTime="2025-10-11 05:10:11.28367073 +0000 UTC m=+1132.179903556" watchObservedRunningTime="2025-10-11 05:10:11.294131001 +0000 UTC m=+1132.190363797"
Oct 11 05:10:11 crc kubenswrapper[4651]: I1011 05:10:11.321936 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.063220006 podStartE2EDuration="5.321917214s" podCreationTimestamp="2025-10-11 05:10:06 +0000 UTC" firstStartedPulling="2025-10-11 05:10:07.27446704 +0000 UTC m=+1128.170699836" lastFinishedPulling="2025-10-11 05:10:10.533164248 +0000 UTC m=+1131.429397044" observedRunningTime="2025-10-11 05:10:11.300676174 +0000 UTC m=+1132.196908970" watchObservedRunningTime="2025-10-11 05:10:11.321917214 +0000 UTC m=+1132.218150000"
Oct 11 05:10:11 crc kubenswrapper[4651]: I1011 05:10:11.331630 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.832663138 podStartE2EDuration="5.331614976s" podCreationTimestamp="2025-10-11 05:10:06 +0000 UTC" firstStartedPulling="2025-10-11 05:10:07.034744043 +0000 UTC m=+1127.930976839" lastFinishedPulling="2025-10-11 05:10:10.533695881 +0000 UTC m=+1131.429928677" observedRunningTime="2025-10-11 05:10:11.316067178 +0000 UTC m=+1132.212299984" watchObservedRunningTime="2025-10-11 05:10:11.331614976 +0000 UTC m=+1132.227847772"
Oct 11 05:10:11 crc kubenswrapper[4651]: I1011 05:10:11.390295 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Oct 11 05:10:11 crc kubenswrapper[4651]: I1011 05:10:11.707544 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Oct 11 05:10:11 crc kubenswrapper[4651]: I1011 05:10:11.720866 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Oct 11 05:10:11 crc kubenswrapper[4651]: I1011 05:10:11.721136 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.131479 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.251042 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb7bn\" (UniqueName: \"kubernetes.io/projected/7bc83296-49f0-4236-89d7-c430b78d15f1-kube-api-access-sb7bn\") pod \"7bc83296-49f0-4236-89d7-c430b78d15f1\" (UID: \"7bc83296-49f0-4236-89d7-c430b78d15f1\") "
Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.251220 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bc83296-49f0-4236-89d7-c430b78d15f1-combined-ca-bundle\") pod \"7bc83296-49f0-4236-89d7-c430b78d15f1\" (UID: \"7bc83296-49f0-4236-89d7-c430b78d15f1\") "
Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.251258 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bc83296-49f0-4236-89d7-c430b78d15f1-config-data\") pod \"7bc83296-49f0-4236-89d7-c430b78d15f1\" (UID: \"7bc83296-49f0-4236-89d7-c430b78d15f1\") "
Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.251392 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7bc83296-49f0-4236-89d7-c430b78d15f1-logs\") pod \"7bc83296-49f0-4236-89d7-c430b78d15f1\" (UID: \"7bc83296-49f0-4236-89d7-c430b78d15f1\") "
Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.252893 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7bc83296-49f0-4236-89d7-c430b78d15f1-logs" (OuterVolumeSpecName: "logs") pod "7bc83296-49f0-4236-89d7-c430b78d15f1" (UID: "7bc83296-49f0-4236-89d7-c430b78d15f1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
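The pod_startup_latency_tracker records above carry two durations. For pods whose images needed no pull, firstStartedPulling/lastFinishedPulling stay at the zero time and podStartSLOduration equals podStartE2EDuration; for nova-api-0 the SLO figure appears to be the E2E duration minus the pull window. A worked check in Go, assuming that subtraction (the logged 1.918511089s matches the monotonic m=+ readings; plain wall-clock subtraction lands about 10ns away):

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
        created, _ := time.Parse(layout, "2025-10-11 05:10:06 +0000 UTC")
        running, _ := time.Parse(layout, "2025-10-11 05:10:11.274255196 +0000 UTC")
        pullStart, _ := time.Parse(layout, "2025-10-11 05:10:07.177358289 +0000 UTC")
        pullEnd, _ := time.Parse(layout, "2025-10-11 05:10:10.533102406 +0000 UTC")

        e2e := running.Sub(created)         // 5.274255196s, matches podStartE2EDuration
        slo := e2e - pullEnd.Sub(pullStart) // ~1.918511079s, matches podStartSLOduration
        fmt.Println(e2e, slo)
    }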
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.271205 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bc83296-49f0-4236-89d7-c430b78d15f1-kube-api-access-sb7bn" (OuterVolumeSpecName: "kube-api-access-sb7bn") pod "7bc83296-49f0-4236-89d7-c430b78d15f1" (UID: "7bc83296-49f0-4236-89d7-c430b78d15f1"). InnerVolumeSpecName "kube-api-access-sb7bn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.296372 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bc83296-49f0-4236-89d7-c430b78d15f1-config-data" (OuterVolumeSpecName: "config-data") pod "7bc83296-49f0-4236-89d7-c430b78d15f1" (UID: "7bc83296-49f0-4236-89d7-c430b78d15f1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.298858 4651 generic.go:334] "Generic (PLEG): container finished" podID="7bc83296-49f0-4236-89d7-c430b78d15f1" containerID="eee392aacccb5204dc790abd04edc740c454ddc00e51fb814b736784f769f409" exitCode=0 Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.298901 4651 generic.go:334] "Generic (PLEG): container finished" podID="7bc83296-49f0-4236-89d7-c430b78d15f1" containerID="c9d0de5ecda628b62c5d643ddf27cf39fe05d386c3fe9109a47a4688e89f4639" exitCode=143 Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.301268 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.301510 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7bc83296-49f0-4236-89d7-c430b78d15f1","Type":"ContainerDied","Data":"eee392aacccb5204dc790abd04edc740c454ddc00e51fb814b736784f769f409"} Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.301631 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7bc83296-49f0-4236-89d7-c430b78d15f1","Type":"ContainerDied","Data":"c9d0de5ecda628b62c5d643ddf27cf39fe05d386c3fe9109a47a4688e89f4639"} Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.301726 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7bc83296-49f0-4236-89d7-c430b78d15f1","Type":"ContainerDied","Data":"f2c77a32f388ae0062e3671c6cfb9a1e43c7fd762a391ff675244ecb318f68eb"} Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.301853 4651 scope.go:117] "RemoveContainer" containerID="eee392aacccb5204dc790abd04edc740c454ddc00e51fb814b736784f769f409" Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.318301 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bc83296-49f0-4236-89d7-c430b78d15f1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7bc83296-49f0-4236-89d7-c430b78d15f1" (UID: "7bc83296-49f0-4236-89d7-c430b78d15f1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.342621 4651 scope.go:117] "RemoveContainer" containerID="c9d0de5ecda628b62c5d643ddf27cf39fe05d386c3fe9109a47a4688e89f4639" Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.353411 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bc83296-49f0-4236-89d7-c430b78d15f1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.353636 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bc83296-49f0-4236-89d7-c430b78d15f1-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.353716 4651 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7bc83296-49f0-4236-89d7-c430b78d15f1-logs\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.353784 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb7bn\" (UniqueName: \"kubernetes.io/projected/7bc83296-49f0-4236-89d7-c430b78d15f1-kube-api-access-sb7bn\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.361060 4651 scope.go:117] "RemoveContainer" containerID="eee392aacccb5204dc790abd04edc740c454ddc00e51fb814b736784f769f409" Oct 11 05:10:12 crc kubenswrapper[4651]: E1011 05:10:12.361517 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eee392aacccb5204dc790abd04edc740c454ddc00e51fb814b736784f769f409\": container with ID starting with eee392aacccb5204dc790abd04edc740c454ddc00e51fb814b736784f769f409 not found: ID does not exist" containerID="eee392aacccb5204dc790abd04edc740c454ddc00e51fb814b736784f769f409" Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.361623 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eee392aacccb5204dc790abd04edc740c454ddc00e51fb814b736784f769f409"} err="failed to get container status \"eee392aacccb5204dc790abd04edc740c454ddc00e51fb814b736784f769f409\": rpc error: code = NotFound desc = could not find container \"eee392aacccb5204dc790abd04edc740c454ddc00e51fb814b736784f769f409\": container with ID starting with eee392aacccb5204dc790abd04edc740c454ddc00e51fb814b736784f769f409 not found: ID does not exist" Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.361702 4651 scope.go:117] "RemoveContainer" containerID="c9d0de5ecda628b62c5d643ddf27cf39fe05d386c3fe9109a47a4688e89f4639" Oct 11 05:10:12 crc kubenswrapper[4651]: E1011 05:10:12.362026 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9d0de5ecda628b62c5d643ddf27cf39fe05d386c3fe9109a47a4688e89f4639\": container with ID starting with c9d0de5ecda628b62c5d643ddf27cf39fe05d386c3fe9109a47a4688e89f4639 not found: ID does not exist" containerID="c9d0de5ecda628b62c5d643ddf27cf39fe05d386c3fe9109a47a4688e89f4639" Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.362119 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9d0de5ecda628b62c5d643ddf27cf39fe05d386c3fe9109a47a4688e89f4639"} err="failed to get container status \"c9d0de5ecda628b62c5d643ddf27cf39fe05d386c3fe9109a47a4688e89f4639\": rpc error: code = NotFound desc = could 
not find container \"c9d0de5ecda628b62c5d643ddf27cf39fe05d386c3fe9109a47a4688e89f4639\": container with ID starting with c9d0de5ecda628b62c5d643ddf27cf39fe05d386c3fe9109a47a4688e89f4639 not found: ID does not exist" Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.362204 4651 scope.go:117] "RemoveContainer" containerID="eee392aacccb5204dc790abd04edc740c454ddc00e51fb814b736784f769f409" Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.362481 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eee392aacccb5204dc790abd04edc740c454ddc00e51fb814b736784f769f409"} err="failed to get container status \"eee392aacccb5204dc790abd04edc740c454ddc00e51fb814b736784f769f409\": rpc error: code = NotFound desc = could not find container \"eee392aacccb5204dc790abd04edc740c454ddc00e51fb814b736784f769f409\": container with ID starting with eee392aacccb5204dc790abd04edc740c454ddc00e51fb814b736784f769f409 not found: ID does not exist" Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.362577 4651 scope.go:117] "RemoveContainer" containerID="c9d0de5ecda628b62c5d643ddf27cf39fe05d386c3fe9109a47a4688e89f4639" Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.362918 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9d0de5ecda628b62c5d643ddf27cf39fe05d386c3fe9109a47a4688e89f4639"} err="failed to get container status \"c9d0de5ecda628b62c5d643ddf27cf39fe05d386c3fe9109a47a4688e89f4639\": rpc error: code = NotFound desc = could not find container \"c9d0de5ecda628b62c5d643ddf27cf39fe05d386c3fe9109a47a4688e89f4639\": container with ID starting with c9d0de5ecda628b62c5d643ddf27cf39fe05d386c3fe9109a47a4688e89f4639 not found: ID does not exist" Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.632631 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.639835 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.664102 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 11 05:10:12 crc kubenswrapper[4651]: E1011 05:10:12.664561 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bc83296-49f0-4236-89d7-c430b78d15f1" containerName="nova-metadata-log" Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.664578 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bc83296-49f0-4236-89d7-c430b78d15f1" containerName="nova-metadata-log" Oct 11 05:10:12 crc kubenswrapper[4651]: E1011 05:10:12.664631 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bc83296-49f0-4236-89d7-c430b78d15f1" containerName="nova-metadata-metadata" Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.664640 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bc83296-49f0-4236-89d7-c430b78d15f1" containerName="nova-metadata-metadata" Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.664872 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bc83296-49f0-4236-89d7-c430b78d15f1" containerName="nova-metadata-log" Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.664894 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bc83296-49f0-4236-89d7-c430b78d15f1" containerName="nova-metadata-metadata" Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.666158 4651 util.go:30] "No sandbox for pod can be found. 
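The RemoveContainer / NotFound pairs above are the kubelet re-attempting deletion of containers that the earlier pod teardown already removed; a NotFound from the runtime is effectively success. A hedged Go sketch of that idempotent-delete pattern, with removeContainer standing in for a CRI call (it is not the kubelet's real client):

    package main

    import (
        "fmt"

        "google.golang.org/grpc/codes"
        "google.golang.org/grpc/status"
    )

    // Stand-in for a CRI RemoveContainer call that reports the container gone.
    func removeContainer(id string) error {
        return status.Error(codes.NotFound, fmt.Sprintf("could not find container %q", id))
    }

    // Idempotent cleanup: NotFound means the container is already deleted,
    // which is exactly the desired end state, so it is swallowed.
    func cleanup(id string) error {
        if err := removeContainer(id); err != nil && status.Code(err) != codes.NotFound {
            return err
        }
        return nil
    }

    func main() {
        fmt.Println(cleanup("eee392aacccb5204dc790abd04edc740c454ddc00e51fb814b736784f769f409"))
    }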
Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.668604 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.668813 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.675027 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.760787 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec7e759f-da4e-4dcb-93dc-38007a6f35e5-logs\") pod \"nova-metadata-0\" (UID: \"ec7e759f-da4e-4dcb-93dc-38007a6f35e5\") " pod="openstack/nova-metadata-0"
Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.760908 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec7e759f-da4e-4dcb-93dc-38007a6f35e5-config-data\") pod \"nova-metadata-0\" (UID: \"ec7e759f-da4e-4dcb-93dc-38007a6f35e5\") " pod="openstack/nova-metadata-0"
Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.760938 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec7e759f-da4e-4dcb-93dc-38007a6f35e5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ec7e759f-da4e-4dcb-93dc-38007a6f35e5\") " pod="openstack/nova-metadata-0"
Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.760963 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jt9z5\" (UniqueName: \"kubernetes.io/projected/ec7e759f-da4e-4dcb-93dc-38007a6f35e5-kube-api-access-jt9z5\") pod \"nova-metadata-0\" (UID: \"ec7e759f-da4e-4dcb-93dc-38007a6f35e5\") " pod="openstack/nova-metadata-0"
Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.760979 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec7e759f-da4e-4dcb-93dc-38007a6f35e5-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"ec7e759f-da4e-4dcb-93dc-38007a6f35e5\") " pod="openstack/nova-metadata-0"
Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.863174 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec7e759f-da4e-4dcb-93dc-38007a6f35e5-logs\") pod \"nova-metadata-0\" (UID: \"ec7e759f-da4e-4dcb-93dc-38007a6f35e5\") " pod="openstack/nova-metadata-0"
Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.863357 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec7e759f-da4e-4dcb-93dc-38007a6f35e5-config-data\") pod \"nova-metadata-0\" (UID: \"ec7e759f-da4e-4dcb-93dc-38007a6f35e5\") " pod="openstack/nova-metadata-0"
Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.863423 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec7e759f-da4e-4dcb-93dc-38007a6f35e5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ec7e759f-da4e-4dcb-93dc-38007a6f35e5\") " pod="openstack/nova-metadata-0"
Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.863466 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jt9z5\" (UniqueName: \"kubernetes.io/projected/ec7e759f-da4e-4dcb-93dc-38007a6f35e5-kube-api-access-jt9z5\") pod \"nova-metadata-0\" (UID: \"ec7e759f-da4e-4dcb-93dc-38007a6f35e5\") " pod="openstack/nova-metadata-0"
Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.863495 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec7e759f-da4e-4dcb-93dc-38007a6f35e5-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"ec7e759f-da4e-4dcb-93dc-38007a6f35e5\") " pod="openstack/nova-metadata-0"
Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.863801 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec7e759f-da4e-4dcb-93dc-38007a6f35e5-logs\") pod \"nova-metadata-0\" (UID: \"ec7e759f-da4e-4dcb-93dc-38007a6f35e5\") " pod="openstack/nova-metadata-0"
Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.868497 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec7e759f-da4e-4dcb-93dc-38007a6f35e5-config-data\") pod \"nova-metadata-0\" (UID: \"ec7e759f-da4e-4dcb-93dc-38007a6f35e5\") " pod="openstack/nova-metadata-0"
Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.876572 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec7e759f-da4e-4dcb-93dc-38007a6f35e5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ec7e759f-da4e-4dcb-93dc-38007a6f35e5\") " pod="openstack/nova-metadata-0"
Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.891527 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec7e759f-da4e-4dcb-93dc-38007a6f35e5-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"ec7e759f-da4e-4dcb-93dc-38007a6f35e5\") " pod="openstack/nova-metadata-0"
Oct 11 05:10:12 crc kubenswrapper[4651]: I1011 05:10:12.894337 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jt9z5\" (UniqueName: \"kubernetes.io/projected/ec7e759f-da4e-4dcb-93dc-38007a6f35e5-kube-api-access-jt9z5\") pod \"nova-metadata-0\" (UID: \"ec7e759f-da4e-4dcb-93dc-38007a6f35e5\") " pod="openstack/nova-metadata-0"
Oct 11 05:10:13 crc kubenswrapper[4651]: I1011 05:10:13.030894 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
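The replacement nova-metadata-0 pod (UID ec7e759f…) mounts one volume its deleted predecessor (UID 7bc83296…) did not: nova-metadata-tls-certs. The adjacent reflector line suggests the backing secret is cert-nova-metadata-internal-svc, though the log never prints the pod spec, so the pairing is an inference. A sketch of such a secret-backed volume using the k8s.io/api/core/v1 types:

    package main

    import (
        "fmt"

        v1 "k8s.io/api/core/v1"
    )

    func main() {
        // Inferred shape of the extra volume on the recreated pod; the
        // secret name comes from the reflector line above, not the spec.
        vol := v1.Volume{
            Name: "nova-metadata-tls-certs",
            VolumeSource: v1.VolumeSource{
                Secret: &v1.SecretVolumeSource{SecretName: "cert-nova-metadata-internal-svc"},
            },
        }
        fmt.Printf("volume %q backed by secret %q\n", vol.Name, vol.VolumeSource.Secret.SecretName)
    }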
Oct 11 05:10:13 crc kubenswrapper[4651]: I1011 05:10:13.540420 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Oct 11 05:10:13 crc kubenswrapper[4651]: W1011 05:10:13.545205 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podec7e759f_da4e_4dcb_93dc_38007a6f35e5.slice/crio-03bac5b0e38a5078d623f561b2e8a42f590b55f6d12c48cb9cd67b74c5d0ef90 WatchSource:0}: Error finding container 03bac5b0e38a5078d623f561b2e8a42f590b55f6d12c48cb9cd67b74c5d0ef90: Status 404 returned error can't find the container with id 03bac5b0e38a5078d623f561b2e8a42f590b55f6d12c48cb9cd67b74c5d0ef90
Oct 11 05:10:13 crc kubenswrapper[4651]: I1011 05:10:13.893679 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bc83296-49f0-4236-89d7-c430b78d15f1" path="/var/lib/kubelet/pods/7bc83296-49f0-4236-89d7-c430b78d15f1/volumes"
Oct 11 05:10:14 crc kubenswrapper[4651]: I1011 05:10:14.327505 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ec7e759f-da4e-4dcb-93dc-38007a6f35e5","Type":"ContainerStarted","Data":"12fae75d98e36b27b918f532dc62592c5639e56448f4b6f0edce247167f3ad36"}
Oct 11 05:10:14 crc kubenswrapper[4651]: I1011 05:10:14.327582 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ec7e759f-da4e-4dcb-93dc-38007a6f35e5","Type":"ContainerStarted","Data":"e8310cbdad903a169c29278c76f4ad365cc30cac07dd78e4341cd0d715f06c5d"}
Oct 11 05:10:14 crc kubenswrapper[4651]: I1011 05:10:14.327612 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ec7e759f-da4e-4dcb-93dc-38007a6f35e5","Type":"ContainerStarted","Data":"03bac5b0e38a5078d623f561b2e8a42f590b55f6d12c48cb9cd67b74c5d0ef90"}
Oct 11 05:10:15 crc kubenswrapper[4651]: I1011 05:10:15.341035 4651 generic.go:334] "Generic (PLEG): container finished" podID="5a6ed193-5ad3-4f79-aa3d-d35033be1f21" containerID="c7c739f99e77a0286e117cc409a94b977ca85e1f2f58b286780045ba328fa488" exitCode=0
Oct 11 05:10:15 crc kubenswrapper[4651]: I1011 05:10:15.341240 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-prppv" event={"ID":"5a6ed193-5ad3-4f79-aa3d-d35033be1f21","Type":"ContainerDied","Data":"c7c739f99e77a0286e117cc409a94b977ca85e1f2f58b286780045ba328fa488"}
Oct 11 05:10:15 crc kubenswrapper[4651]: I1011 05:10:15.343879 4651 generic.go:334] "Generic (PLEG): container finished" podID="0a3cd96c-5270-46d9-befe-b18a3467ddde" containerID="f0d5dd42154f318918654257dcf6c3f9e7191592a7221086692304cf8696c65e" exitCode=0
Oct 11 05:10:15 crc kubenswrapper[4651]: I1011 05:10:15.344001 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-r47rs" event={"ID":"0a3cd96c-5270-46d9-befe-b18a3467ddde","Type":"ContainerDied","Data":"f0d5dd42154f318918654257dcf6c3f9e7191592a7221086692304cf8696c65e"}
Oct 11 05:10:15 crc kubenswrapper[4651]: I1011 05:10:15.360196 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.360176089 podStartE2EDuration="3.360176089s" podCreationTimestamp="2025-10-11 05:10:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:10:14.350360251 +0000 UTC m=+1135.246593107" watchObservedRunningTime="2025-10-11 05:10:15.360176089 +0000 UTC m=+1136.256408895"
Oct 11 05:10:16 crc kubenswrapper[4651]: I1011 05:10:16.310474 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 11 05:10:16 crc kubenswrapper[4651]: I1011 05:10:16.310562 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 11 05:10:16 crc kubenswrapper[4651]: I1011 05:10:16.388340 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Oct 11 05:10:16 crc kubenswrapper[4651]: I1011 05:10:16.446719 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Oct 11 05:10:16 crc kubenswrapper[4651]: I1011 05:10:16.535926 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Oct 11 05:10:16 crc kubenswrapper[4651]: I1011 05:10:16.536033 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Oct 11 05:10:16 crc kubenswrapper[4651]: I1011 05:10:16.771455 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-865f5d856f-hjq47"
Oct 11 05:10:16 crc kubenswrapper[4651]: I1011 05:10:16.836683 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-n8cw8"]
Oct 11 05:10:16 crc kubenswrapper[4651]: I1011 05:10:16.837009 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6bb4fc677f-n8cw8" podUID="482a99d8-100a-4f9c-9cd2-2834cd349de7" containerName="dnsmasq-dns" containerID="cri-o://210573dc255d42d525fbf537a18cc0e583cf0b196696ef0450da8440358b0e41" gracePeriod=10
Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.014845 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-prppv"
Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.029200 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-r47rs"
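The Liveness failure above is a plain HTTP GET that was refused because nothing was listening on 127.0.0.1:8798. A minimal probe in the same spirit; an illustrative stand-in, not the kubelet's prober, with the URL copied from the log:

    package main

    import (
        "fmt"
        "net/http"
        "time"
    )

    // probe GETs the endpoint and treats connect errors or non-2xx/3xx
    // statuses as failures, roughly mirroring an HTTP liveness check.
    func probe(url string) error {
        client := &http.Client{Timeout: time.Second}
        resp, err := client.Get(url)
        if err != nil {
            return err // e.g. "connect: connection refused", as logged
        }
        defer resp.Body.Close()
        if resp.StatusCode < 200 || resp.StatusCode >= 400 {
            return fmt.Errorf("unhealthy status %d", resp.StatusCode)
        }
        return nil
    }

    func main() {
        if err := probe("http://127.0.0.1:8798/health"); err != nil {
            fmt.Println("Probe failed:", err)
        }
    }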
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-r47rs" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.171515 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a3cd96c-5270-46d9-befe-b18a3467ddde-config-data\") pod \"0a3cd96c-5270-46d9-befe-b18a3467ddde\" (UID: \"0a3cd96c-5270-46d9-befe-b18a3467ddde\") " Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.171563 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a3cd96c-5270-46d9-befe-b18a3467ddde-scripts\") pod \"0a3cd96c-5270-46d9-befe-b18a3467ddde\" (UID: \"0a3cd96c-5270-46d9-befe-b18a3467ddde\") " Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.171683 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a6ed193-5ad3-4f79-aa3d-d35033be1f21-scripts\") pod \"5a6ed193-5ad3-4f79-aa3d-d35033be1f21\" (UID: \"5a6ed193-5ad3-4f79-aa3d-d35033be1f21\") " Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.171706 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a3cd96c-5270-46d9-befe-b18a3467ddde-combined-ca-bundle\") pod \"0a3cd96c-5270-46d9-befe-b18a3467ddde\" (UID: \"0a3cd96c-5270-46d9-befe-b18a3467ddde\") " Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.171759 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a6ed193-5ad3-4f79-aa3d-d35033be1f21-config-data\") pod \"5a6ed193-5ad3-4f79-aa3d-d35033be1f21\" (UID: \"5a6ed193-5ad3-4f79-aa3d-d35033be1f21\") " Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.171803 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a6ed193-5ad3-4f79-aa3d-d35033be1f21-combined-ca-bundle\") pod \"5a6ed193-5ad3-4f79-aa3d-d35033be1f21\" (UID: \"5a6ed193-5ad3-4f79-aa3d-d35033be1f21\") " Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.171855 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dt4ns\" (UniqueName: \"kubernetes.io/projected/0a3cd96c-5270-46d9-befe-b18a3467ddde-kube-api-access-dt4ns\") pod \"0a3cd96c-5270-46d9-befe-b18a3467ddde\" (UID: \"0a3cd96c-5270-46d9-befe-b18a3467ddde\") " Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.171882 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q2nhp\" (UniqueName: \"kubernetes.io/projected/5a6ed193-5ad3-4f79-aa3d-d35033be1f21-kube-api-access-q2nhp\") pod \"5a6ed193-5ad3-4f79-aa3d-d35033be1f21\" (UID: \"5a6ed193-5ad3-4f79-aa3d-d35033be1f21\") " Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.178982 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a3cd96c-5270-46d9-befe-b18a3467ddde-scripts" (OuterVolumeSpecName: "scripts") pod "0a3cd96c-5270-46d9-befe-b18a3467ddde" (UID: "0a3cd96c-5270-46d9-befe-b18a3467ddde"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.179744 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a6ed193-5ad3-4f79-aa3d-d35033be1f21-scripts" (OuterVolumeSpecName: "scripts") pod "5a6ed193-5ad3-4f79-aa3d-d35033be1f21" (UID: "5a6ed193-5ad3-4f79-aa3d-d35033be1f21"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.181110 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a6ed193-5ad3-4f79-aa3d-d35033be1f21-kube-api-access-q2nhp" (OuterVolumeSpecName: "kube-api-access-q2nhp") pod "5a6ed193-5ad3-4f79-aa3d-d35033be1f21" (UID: "5a6ed193-5ad3-4f79-aa3d-d35033be1f21"). InnerVolumeSpecName "kube-api-access-q2nhp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.192601 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a3cd96c-5270-46d9-befe-b18a3467ddde-kube-api-access-dt4ns" (OuterVolumeSpecName: "kube-api-access-dt4ns") pod "0a3cd96c-5270-46d9-befe-b18a3467ddde" (UID: "0a3cd96c-5270-46d9-befe-b18a3467ddde"). InnerVolumeSpecName "kube-api-access-dt4ns". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.212582 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a6ed193-5ad3-4f79-aa3d-d35033be1f21-config-data" (OuterVolumeSpecName: "config-data") pod "5a6ed193-5ad3-4f79-aa3d-d35033be1f21" (UID: "5a6ed193-5ad3-4f79-aa3d-d35033be1f21"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.215947 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a6ed193-5ad3-4f79-aa3d-d35033be1f21-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5a6ed193-5ad3-4f79-aa3d-d35033be1f21" (UID: "5a6ed193-5ad3-4f79-aa3d-d35033be1f21"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.219940 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a3cd96c-5270-46d9-befe-b18a3467ddde-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0a3cd96c-5270-46d9-befe-b18a3467ddde" (UID: "0a3cd96c-5270-46d9-befe-b18a3467ddde"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.225999 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a3cd96c-5270-46d9-befe-b18a3467ddde-config-data" (OuterVolumeSpecName: "config-data") pod "0a3cd96c-5270-46d9-befe-b18a3467ddde" (UID: "0a3cd96c-5270-46d9-befe-b18a3467ddde"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.275469 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a6ed193-5ad3-4f79-aa3d-d35033be1f21-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.275500 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a6ed193-5ad3-4f79-aa3d-d35033be1f21-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.276154 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dt4ns\" (UniqueName: \"kubernetes.io/projected/0a3cd96c-5270-46d9-befe-b18a3467ddde-kube-api-access-dt4ns\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.276187 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q2nhp\" (UniqueName: \"kubernetes.io/projected/5a6ed193-5ad3-4f79-aa3d-d35033be1f21-kube-api-access-q2nhp\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.276204 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a3cd96c-5270-46d9-befe-b18a3467ddde-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.276213 4651 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a3cd96c-5270-46d9-befe-b18a3467ddde-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.276222 4651 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a6ed193-5ad3-4f79-aa3d-d35033be1f21-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.276231 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a3cd96c-5270-46d9-befe-b18a3467ddde-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.340619 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-n8cw8" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.378502 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/482a99d8-100a-4f9c-9cd2-2834cd349de7-ovsdbserver-nb\") pod \"482a99d8-100a-4f9c-9cd2-2834cd349de7\" (UID: \"482a99d8-100a-4f9c-9cd2-2834cd349de7\") " Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.378544 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/482a99d8-100a-4f9c-9cd2-2834cd349de7-dns-svc\") pod \"482a99d8-100a-4f9c-9cd2-2834cd349de7\" (UID: \"482a99d8-100a-4f9c-9cd2-2834cd349de7\") " Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.378589 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qb5fq\" (UniqueName: \"kubernetes.io/projected/482a99d8-100a-4f9c-9cd2-2834cd349de7-kube-api-access-qb5fq\") pod \"482a99d8-100a-4f9c-9cd2-2834cd349de7\" (UID: \"482a99d8-100a-4f9c-9cd2-2834cd349de7\") " Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.378636 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/482a99d8-100a-4f9c-9cd2-2834cd349de7-config\") pod \"482a99d8-100a-4f9c-9cd2-2834cd349de7\" (UID: \"482a99d8-100a-4f9c-9cd2-2834cd349de7\") " Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.378656 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/482a99d8-100a-4f9c-9cd2-2834cd349de7-dns-swift-storage-0\") pod \"482a99d8-100a-4f9c-9cd2-2834cd349de7\" (UID: \"482a99d8-100a-4f9c-9cd2-2834cd349de7\") " Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.378723 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/482a99d8-100a-4f9c-9cd2-2834cd349de7-ovsdbserver-sb\") pod \"482a99d8-100a-4f9c-9cd2-2834cd349de7\" (UID: \"482a99d8-100a-4f9c-9cd2-2834cd349de7\") " Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.382812 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/482a99d8-100a-4f9c-9cd2-2834cd349de7-kube-api-access-qb5fq" (OuterVolumeSpecName: "kube-api-access-qb5fq") pod "482a99d8-100a-4f9c-9cd2-2834cd349de7" (UID: "482a99d8-100a-4f9c-9cd2-2834cd349de7"). InnerVolumeSpecName "kube-api-access-qb5fq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.383677 4651 generic.go:334] "Generic (PLEG): container finished" podID="482a99d8-100a-4f9c-9cd2-2834cd349de7" containerID="210573dc255d42d525fbf537a18cc0e583cf0b196696ef0450da8440358b0e41" exitCode=0 Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.383728 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-n8cw8" event={"ID":"482a99d8-100a-4f9c-9cd2-2834cd349de7","Type":"ContainerDied","Data":"210573dc255d42d525fbf537a18cc0e583cf0b196696ef0450da8440358b0e41"} Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.383754 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-n8cw8" event={"ID":"482a99d8-100a-4f9c-9cd2-2834cd349de7","Type":"ContainerDied","Data":"a9ccde7492abea2963ecbf7f53efe5f9e99b22749485d2d8b8f73b5de519e598"} Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.383770 4651 scope.go:117] "RemoveContainer" containerID="210573dc255d42d525fbf537a18cc0e583cf0b196696ef0450da8440358b0e41" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.383923 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-n8cw8" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.409408 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-r47rs" event={"ID":"0a3cd96c-5270-46d9-befe-b18a3467ddde","Type":"ContainerDied","Data":"091f19b58591e767dd01ec26d3de36877cdcc33037fd84227e9dd83ae7cda930"} Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.409452 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="091f19b58591e767dd01ec26d3de36877cdcc33037fd84227e9dd83ae7cda930" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.409513 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-r47rs" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.436796 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-prppv" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.436972 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-prppv" event={"ID":"5a6ed193-5ad3-4f79-aa3d-d35033be1f21","Type":"ContainerDied","Data":"0f576ecdb7edacb1abeee160cbf899dc874db6544fb54532b75558087638b6b1"} Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.437013 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0f576ecdb7edacb1abeee160cbf899dc874db6544fb54532b75558087638b6b1" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.465748 4651 scope.go:117] "RemoveContainer" containerID="8482bdca65317d8471325f307d0f1fc8137813a2ca1d91ee8ebc92194f7143b7" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.481362 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qb5fq\" (UniqueName: \"kubernetes.io/projected/482a99d8-100a-4f9c-9cd2-2834cd349de7-kube-api-access-qb5fq\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.495572 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/482a99d8-100a-4f9c-9cd2-2834cd349de7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "482a99d8-100a-4f9c-9cd2-2834cd349de7" (UID: "482a99d8-100a-4f9c-9cd2-2834cd349de7"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.500007 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 11 05:10:17 crc kubenswrapper[4651]: E1011 05:10:17.500638 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a3cd96c-5270-46d9-befe-b18a3467ddde" containerName="nova-cell1-conductor-db-sync" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.500660 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a3cd96c-5270-46d9-befe-b18a3467ddde" containerName="nova-cell1-conductor-db-sync" Oct 11 05:10:17 crc kubenswrapper[4651]: E1011 05:10:17.500674 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="482a99d8-100a-4f9c-9cd2-2834cd349de7" containerName="dnsmasq-dns" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.500683 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="482a99d8-100a-4f9c-9cd2-2834cd349de7" containerName="dnsmasq-dns" Oct 11 05:10:17 crc kubenswrapper[4651]: E1011 05:10:17.500738 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a6ed193-5ad3-4f79-aa3d-d35033be1f21" containerName="nova-manage" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.500748 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a6ed193-5ad3-4f79-aa3d-d35033be1f21" containerName="nova-manage" Oct 11 05:10:17 crc kubenswrapper[4651]: E1011 05:10:17.500768 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="482a99d8-100a-4f9c-9cd2-2834cd349de7" containerName="init" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.500775 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="482a99d8-100a-4f9c-9cd2-2834cd349de7" containerName="init" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.502479 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/482a99d8-100a-4f9c-9cd2-2834cd349de7-config" (OuterVolumeSpecName: "config") pod "482a99d8-100a-4f9c-9cd2-2834cd349de7" (UID: 
"482a99d8-100a-4f9c-9cd2-2834cd349de7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.502719 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="482a99d8-100a-4f9c-9cd2-2834cd349de7" containerName="dnsmasq-dns" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.502757 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a6ed193-5ad3-4f79-aa3d-d35033be1f21" containerName="nova-manage" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.502779 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a3cd96c-5270-46d9-befe-b18a3467ddde" containerName="nova-cell1-conductor-db-sync" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.503734 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.509060 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.509324 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.510672 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.521020 4651 scope.go:117] "RemoveContainer" containerID="210573dc255d42d525fbf537a18cc0e583cf0b196696ef0450da8440358b0e41" Oct 11 05:10:17 crc kubenswrapper[4651]: E1011 05:10:17.523513 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"210573dc255d42d525fbf537a18cc0e583cf0b196696ef0450da8440358b0e41\": container with ID starting with 210573dc255d42d525fbf537a18cc0e583cf0b196696ef0450da8440358b0e41 not found: ID does not exist" containerID="210573dc255d42d525fbf537a18cc0e583cf0b196696ef0450da8440358b0e41" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.523560 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"210573dc255d42d525fbf537a18cc0e583cf0b196696ef0450da8440358b0e41"} err="failed to get container status \"210573dc255d42d525fbf537a18cc0e583cf0b196696ef0450da8440358b0e41\": rpc error: code = NotFound desc = could not find container \"210573dc255d42d525fbf537a18cc0e583cf0b196696ef0450da8440358b0e41\": container with ID starting with 210573dc255d42d525fbf537a18cc0e583cf0b196696ef0450da8440358b0e41 not found: ID does not exist" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.523587 4651 scope.go:117] "RemoveContainer" containerID="8482bdca65317d8471325f307d0f1fc8137813a2ca1d91ee8ebc92194f7143b7" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.523968 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/482a99d8-100a-4f9c-9cd2-2834cd349de7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "482a99d8-100a-4f9c-9cd2-2834cd349de7" (UID: "482a99d8-100a-4f9c-9cd2-2834cd349de7"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:10:17 crc kubenswrapper[4651]: E1011 05:10:17.525522 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8482bdca65317d8471325f307d0f1fc8137813a2ca1d91ee8ebc92194f7143b7\": container with ID starting with 8482bdca65317d8471325f307d0f1fc8137813a2ca1d91ee8ebc92194f7143b7 not found: ID does not exist" containerID="8482bdca65317d8471325f307d0f1fc8137813a2ca1d91ee8ebc92194f7143b7" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.525573 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8482bdca65317d8471325f307d0f1fc8137813a2ca1d91ee8ebc92194f7143b7"} err="failed to get container status \"8482bdca65317d8471325f307d0f1fc8137813a2ca1d91ee8ebc92194f7143b7\": rpc error: code = NotFound desc = could not find container \"8482bdca65317d8471325f307d0f1fc8137813a2ca1d91ee8ebc92194f7143b7\": container with ID starting with 8482bdca65317d8471325f307d0f1fc8137813a2ca1d91ee8ebc92194f7143b7 not found: ID does not exist" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.555478 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/482a99d8-100a-4f9c-9cd2-2834cd349de7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "482a99d8-100a-4f9c-9cd2-2834cd349de7" (UID: "482a99d8-100a-4f9c-9cd2-2834cd349de7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.583233 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/482a99d8-100a-4f9c-9cd2-2834cd349de7-config\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.583267 4651 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/482a99d8-100a-4f9c-9cd2-2834cd349de7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.583277 4651 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/482a99d8-100a-4f9c-9cd2-2834cd349de7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.583288 4651 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/482a99d8-100a-4f9c-9cd2-2834cd349de7-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.590731 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/482a99d8-100a-4f9c-9cd2-2834cd349de7-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "482a99d8-100a-4f9c-9cd2-2834cd349de7" (UID: "482a99d8-100a-4f9c-9cd2-2834cd349de7"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.592062 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.592363 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="8a5d8d3c-139c-4d8b-94ec-38316a4534d3" containerName="nova-api-log" containerID="cri-o://6a98d8d406b9633cc15a7fd907e9f68ce3d2f462fd619613d209a2cd067cf200" gracePeriod=30 Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.592909 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="8a5d8d3c-139c-4d8b-94ec-38316a4534d3" containerName="nova-api-api" containerID="cri-o://ba84dac6a4e13cc66539d24b28b518fdaa622f6434cd3e3f606907765d7286d9" gracePeriod=30 Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.600989 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="8a5d8d3c-139c-4d8b-94ec-38316a4534d3" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.191:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.601117 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="8a5d8d3c-139c-4d8b-94ec-38316a4534d3" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.191:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.602895 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.613076 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.613402 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="ec7e759f-da4e-4dcb-93dc-38007a6f35e5" containerName="nova-metadata-log" containerID="cri-o://e8310cbdad903a169c29278c76f4ad365cc30cac07dd78e4341cd0d715f06c5d" gracePeriod=30 Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.614157 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="ec7e759f-da4e-4dcb-93dc-38007a6f35e5" containerName="nova-metadata-metadata" containerID="cri-o://12fae75d98e36b27b918f532dc62592c5639e56448f4b6f0edce247167f3ad36" gracePeriod=30 Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.684413 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2b4f087-b74d-4683-815d-35b6f7736f04-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"d2b4f087-b74d-4683-815d-35b6f7736f04\") " pod="openstack/nova-cell1-conductor-0" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.684488 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbdbr\" (UniqueName: \"kubernetes.io/projected/d2b4f087-b74d-4683-815d-35b6f7736f04-kube-api-access-kbdbr\") pod \"nova-cell1-conductor-0\" (UID: \"d2b4f087-b74d-4683-815d-35b6f7736f04\") " pod="openstack/nova-cell1-conductor-0" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.684649 4651 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2b4f087-b74d-4683-815d-35b6f7736f04-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"d2b4f087-b74d-4683-815d-35b6f7736f04\") " pod="openstack/nova-cell1-conductor-0" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.684786 4651 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/482a99d8-100a-4f9c-9cd2-2834cd349de7-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.750400 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-n8cw8"] Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.764930 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-n8cw8"] Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.786483 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbdbr\" (UniqueName: \"kubernetes.io/projected/d2b4f087-b74d-4683-815d-35b6f7736f04-kube-api-access-kbdbr\") pod \"nova-cell1-conductor-0\" (UID: \"d2b4f087-b74d-4683-815d-35b6f7736f04\") " pod="openstack/nova-cell1-conductor-0" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.786629 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2b4f087-b74d-4683-815d-35b6f7736f04-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"d2b4f087-b74d-4683-815d-35b6f7736f04\") " pod="openstack/nova-cell1-conductor-0" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.786692 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2b4f087-b74d-4683-815d-35b6f7736f04-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"d2b4f087-b74d-4683-815d-35b6f7736f04\") " pod="openstack/nova-cell1-conductor-0" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.795708 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2b4f087-b74d-4683-815d-35b6f7736f04-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"d2b4f087-b74d-4683-815d-35b6f7736f04\") " pod="openstack/nova-cell1-conductor-0" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.806512 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kbdbr\" (UniqueName: \"kubernetes.io/projected/d2b4f087-b74d-4683-815d-35b6f7736f04-kube-api-access-kbdbr\") pod \"nova-cell1-conductor-0\" (UID: \"d2b4f087-b74d-4683-815d-35b6f7736f04\") " pod="openstack/nova-cell1-conductor-0" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.807593 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2b4f087-b74d-4683-815d-35b6f7736f04-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"d2b4f087-b74d-4683-815d-35b6f7736f04\") " pod="openstack/nova-cell1-conductor-0" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.857226 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 11 05:10:17 crc kubenswrapper[4651]: E1011 05:10:17.896496 4651 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod482a99d8_100a_4f9c_9cd2_2834cd349de7.slice/crio-a9ccde7492abea2963ecbf7f53efe5f9e99b22749485d2d8b8f73b5de519e598\": RecentStats: unable to find data in memory cache]" Oct 11 05:10:17 crc kubenswrapper[4651]: I1011 05:10:17.901915 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="482a99d8-100a-4f9c-9cd2-2834cd349de7" path="/var/lib/kubelet/pods/482a99d8-100a-4f9c-9cd2-2834cd349de7/volumes" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.031899 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.032397 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.189899 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.300899 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec7e759f-da4e-4dcb-93dc-38007a6f35e5-combined-ca-bundle\") pod \"ec7e759f-da4e-4dcb-93dc-38007a6f35e5\" (UID: \"ec7e759f-da4e-4dcb-93dc-38007a6f35e5\") " Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.300975 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec7e759f-da4e-4dcb-93dc-38007a6f35e5-logs\") pod \"ec7e759f-da4e-4dcb-93dc-38007a6f35e5\" (UID: \"ec7e759f-da4e-4dcb-93dc-38007a6f35e5\") " Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.301007 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec7e759f-da4e-4dcb-93dc-38007a6f35e5-nova-metadata-tls-certs\") pod \"ec7e759f-da4e-4dcb-93dc-38007a6f35e5\" (UID: \"ec7e759f-da4e-4dcb-93dc-38007a6f35e5\") " Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.301027 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec7e759f-da4e-4dcb-93dc-38007a6f35e5-config-data\") pod \"ec7e759f-da4e-4dcb-93dc-38007a6f35e5\" (UID: \"ec7e759f-da4e-4dcb-93dc-38007a6f35e5\") " Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.301047 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jt9z5\" (UniqueName: \"kubernetes.io/projected/ec7e759f-da4e-4dcb-93dc-38007a6f35e5-kube-api-access-jt9z5\") pod \"ec7e759f-da4e-4dcb-93dc-38007a6f35e5\" (UID: \"ec7e759f-da4e-4dcb-93dc-38007a6f35e5\") " Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.302086 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec7e759f-da4e-4dcb-93dc-38007a6f35e5-logs" (OuterVolumeSpecName: "logs") pod "ec7e759f-da4e-4dcb-93dc-38007a6f35e5" (UID: "ec7e759f-da4e-4dcb-93dc-38007a6f35e5"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.306429 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec7e759f-da4e-4dcb-93dc-38007a6f35e5-kube-api-access-jt9z5" (OuterVolumeSpecName: "kube-api-access-jt9z5") pod "ec7e759f-da4e-4dcb-93dc-38007a6f35e5" (UID: "ec7e759f-da4e-4dcb-93dc-38007a6f35e5"). InnerVolumeSpecName "kube-api-access-jt9z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.328952 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec7e759f-da4e-4dcb-93dc-38007a6f35e5-config-data" (OuterVolumeSpecName: "config-data") pod "ec7e759f-da4e-4dcb-93dc-38007a6f35e5" (UID: "ec7e759f-da4e-4dcb-93dc-38007a6f35e5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.333884 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec7e759f-da4e-4dcb-93dc-38007a6f35e5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ec7e759f-da4e-4dcb-93dc-38007a6f35e5" (UID: "ec7e759f-da4e-4dcb-93dc-38007a6f35e5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.354384 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec7e759f-da4e-4dcb-93dc-38007a6f35e5-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "ec7e759f-da4e-4dcb-93dc-38007a6f35e5" (UID: "ec7e759f-da4e-4dcb-93dc-38007a6f35e5"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.403459 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec7e759f-da4e-4dcb-93dc-38007a6f35e5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.403504 4651 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec7e759f-da4e-4dcb-93dc-38007a6f35e5-logs\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.403531 4651 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec7e759f-da4e-4dcb-93dc-38007a6f35e5-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.403546 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec7e759f-da4e-4dcb-93dc-38007a6f35e5-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.403558 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jt9z5\" (UniqueName: \"kubernetes.io/projected/ec7e759f-da4e-4dcb-93dc-38007a6f35e5-kube-api-access-jt9z5\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.425440 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 11 05:10:18 crc kubenswrapper[4651]: W1011 05:10:18.428219 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd2b4f087_b74d_4683_815d_35b6f7736f04.slice/crio-e16a1349f0b018c3f732cc63b2fc93708731522632d533d37236e9c97f792161 WatchSource:0}: Error finding container e16a1349f0b018c3f732cc63b2fc93708731522632d533d37236e9c97f792161: Status 404 returned error can't find the container with id e16a1349f0b018c3f732cc63b2fc93708731522632d533d37236e9c97f792161 Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.449280 4651 generic.go:334] "Generic (PLEG): container finished" podID="8a5d8d3c-139c-4d8b-94ec-38316a4534d3" containerID="6a98d8d406b9633cc15a7fd907e9f68ce3d2f462fd619613d209a2cd067cf200" exitCode=143 Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.449355 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8a5d8d3c-139c-4d8b-94ec-38316a4534d3","Type":"ContainerDied","Data":"6a98d8d406b9633cc15a7fd907e9f68ce3d2f462fd619613d209a2cd067cf200"} Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.453425 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"d2b4f087-b74d-4683-815d-35b6f7736f04","Type":"ContainerStarted","Data":"e16a1349f0b018c3f732cc63b2fc93708731522632d533d37236e9c97f792161"} Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.455410 4651 generic.go:334] "Generic (PLEG): container finished" podID="ec7e759f-da4e-4dcb-93dc-38007a6f35e5" containerID="12fae75d98e36b27b918f532dc62592c5639e56448f4b6f0edce247167f3ad36" exitCode=0 Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.455440 4651 generic.go:334] "Generic (PLEG): container finished" podID="ec7e759f-da4e-4dcb-93dc-38007a6f35e5" containerID="e8310cbdad903a169c29278c76f4ad365cc30cac07dd78e4341cd0d715f06c5d" exitCode=143 Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.455476 4651 util.go:48] 
"No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.455510 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ec7e759f-da4e-4dcb-93dc-38007a6f35e5","Type":"ContainerDied","Data":"12fae75d98e36b27b918f532dc62592c5639e56448f4b6f0edce247167f3ad36"} Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.455558 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ec7e759f-da4e-4dcb-93dc-38007a6f35e5","Type":"ContainerDied","Data":"e8310cbdad903a169c29278c76f4ad365cc30cac07dd78e4341cd0d715f06c5d"} Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.455571 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ec7e759f-da4e-4dcb-93dc-38007a6f35e5","Type":"ContainerDied","Data":"03bac5b0e38a5078d623f561b2e8a42f590b55f6d12c48cb9cd67b74c5d0ef90"} Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.455586 4651 scope.go:117] "RemoveContainer" containerID="12fae75d98e36b27b918f532dc62592c5639e56448f4b6f0edce247167f3ad36" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.480242 4651 scope.go:117] "RemoveContainer" containerID="e8310cbdad903a169c29278c76f4ad365cc30cac07dd78e4341cd0d715f06c5d" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.487381 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.496676 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.520494 4651 scope.go:117] "RemoveContainer" containerID="12fae75d98e36b27b918f532dc62592c5639e56448f4b6f0edce247167f3ad36" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.520624 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 11 05:10:18 crc kubenswrapper[4651]: E1011 05:10:18.521075 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec7e759f-da4e-4dcb-93dc-38007a6f35e5" containerName="nova-metadata-log" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.521094 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec7e759f-da4e-4dcb-93dc-38007a6f35e5" containerName="nova-metadata-log" Oct 11 05:10:18 crc kubenswrapper[4651]: E1011 05:10:18.521131 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec7e759f-da4e-4dcb-93dc-38007a6f35e5" containerName="nova-metadata-metadata" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.521139 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec7e759f-da4e-4dcb-93dc-38007a6f35e5" containerName="nova-metadata-metadata" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.521335 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec7e759f-da4e-4dcb-93dc-38007a6f35e5" containerName="nova-metadata-metadata" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.521365 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec7e759f-da4e-4dcb-93dc-38007a6f35e5" containerName="nova-metadata-log" Oct 11 05:10:18 crc kubenswrapper[4651]: E1011 05:10:18.522026 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"12fae75d98e36b27b918f532dc62592c5639e56448f4b6f0edce247167f3ad36\": container with ID starting with 
12fae75d98e36b27b918f532dc62592c5639e56448f4b6f0edce247167f3ad36 not found: ID does not exist" containerID="12fae75d98e36b27b918f532dc62592c5639e56448f4b6f0edce247167f3ad36" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.522074 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12fae75d98e36b27b918f532dc62592c5639e56448f4b6f0edce247167f3ad36"} err="failed to get container status \"12fae75d98e36b27b918f532dc62592c5639e56448f4b6f0edce247167f3ad36\": rpc error: code = NotFound desc = could not find container \"12fae75d98e36b27b918f532dc62592c5639e56448f4b6f0edce247167f3ad36\": container with ID starting with 12fae75d98e36b27b918f532dc62592c5639e56448f4b6f0edce247167f3ad36 not found: ID does not exist" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.522104 4651 scope.go:117] "RemoveContainer" containerID="e8310cbdad903a169c29278c76f4ad365cc30cac07dd78e4341cd0d715f06c5d" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.522325 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.524922 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.525196 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 11 05:10:18 crc kubenswrapper[4651]: E1011 05:10:18.525935 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e8310cbdad903a169c29278c76f4ad365cc30cac07dd78e4341cd0d715f06c5d\": container with ID starting with e8310cbdad903a169c29278c76f4ad365cc30cac07dd78e4341cd0d715f06c5d not found: ID does not exist" containerID="e8310cbdad903a169c29278c76f4ad365cc30cac07dd78e4341cd0d715f06c5d" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.525988 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8310cbdad903a169c29278c76f4ad365cc30cac07dd78e4341cd0d715f06c5d"} err="failed to get container status \"e8310cbdad903a169c29278c76f4ad365cc30cac07dd78e4341cd0d715f06c5d\": rpc error: code = NotFound desc = could not find container \"e8310cbdad903a169c29278c76f4ad365cc30cac07dd78e4341cd0d715f06c5d\": container with ID starting with e8310cbdad903a169c29278c76f4ad365cc30cac07dd78e4341cd0d715f06c5d not found: ID does not exist" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.526017 4651 scope.go:117] "RemoveContainer" containerID="12fae75d98e36b27b918f532dc62592c5639e56448f4b6f0edce247167f3ad36" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.526552 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12fae75d98e36b27b918f532dc62592c5639e56448f4b6f0edce247167f3ad36"} err="failed to get container status \"12fae75d98e36b27b918f532dc62592c5639e56448f4b6f0edce247167f3ad36\": rpc error: code = NotFound desc = could not find container \"12fae75d98e36b27b918f532dc62592c5639e56448f4b6f0edce247167f3ad36\": container with ID starting with 12fae75d98e36b27b918f532dc62592c5639e56448f4b6f0edce247167f3ad36 not found: ID does not exist" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.526585 4651 scope.go:117] "RemoveContainer" containerID="e8310cbdad903a169c29278c76f4ad365cc30cac07dd78e4341cd0d715f06c5d" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.526812 4651 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8310cbdad903a169c29278c76f4ad365cc30cac07dd78e4341cd0d715f06c5d"} err="failed to get container status \"e8310cbdad903a169c29278c76f4ad365cc30cac07dd78e4341cd0d715f06c5d\": rpc error: code = NotFound desc = could not find container \"e8310cbdad903a169c29278c76f4ad365cc30cac07dd78e4341cd0d715f06c5d\": container with ID starting with e8310cbdad903a169c29278c76f4ad365cc30cac07dd78e4341cd0d715f06c5d not found: ID does not exist" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.529494 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.709419 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/29a549a6-e4d5-41c6-8042-d6a88f17fa94-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"29a549a6-e4d5-41c6-8042-d6a88f17fa94\") " pod="openstack/nova-metadata-0" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.709472 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29a549a6-e4d5-41c6-8042-d6a88f17fa94-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"29a549a6-e4d5-41c6-8042-d6a88f17fa94\") " pod="openstack/nova-metadata-0" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.709502 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29a549a6-e4d5-41c6-8042-d6a88f17fa94-config-data\") pod \"nova-metadata-0\" (UID: \"29a549a6-e4d5-41c6-8042-d6a88f17fa94\") " pod="openstack/nova-metadata-0" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.709667 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29a549a6-e4d5-41c6-8042-d6a88f17fa94-logs\") pod \"nova-metadata-0\" (UID: \"29a549a6-e4d5-41c6-8042-d6a88f17fa94\") " pod="openstack/nova-metadata-0" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.709713 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hhjm9\" (UniqueName: \"kubernetes.io/projected/29a549a6-e4d5-41c6-8042-d6a88f17fa94-kube-api-access-hhjm9\") pod \"nova-metadata-0\" (UID: \"29a549a6-e4d5-41c6-8042-d6a88f17fa94\") " pod="openstack/nova-metadata-0" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.811256 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29a549a6-e4d5-41c6-8042-d6a88f17fa94-logs\") pod \"nova-metadata-0\" (UID: \"29a549a6-e4d5-41c6-8042-d6a88f17fa94\") " pod="openstack/nova-metadata-0" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.811331 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hhjm9\" (UniqueName: \"kubernetes.io/projected/29a549a6-e4d5-41c6-8042-d6a88f17fa94-kube-api-access-hhjm9\") pod \"nova-metadata-0\" (UID: \"29a549a6-e4d5-41c6-8042-d6a88f17fa94\") " pod="openstack/nova-metadata-0" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.811375 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/29a549a6-e4d5-41c6-8042-d6a88f17fa94-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"29a549a6-e4d5-41c6-8042-d6a88f17fa94\") " pod="openstack/nova-metadata-0" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.811398 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29a549a6-e4d5-41c6-8042-d6a88f17fa94-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"29a549a6-e4d5-41c6-8042-d6a88f17fa94\") " pod="openstack/nova-metadata-0" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.811420 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29a549a6-e4d5-41c6-8042-d6a88f17fa94-config-data\") pod \"nova-metadata-0\" (UID: \"29a549a6-e4d5-41c6-8042-d6a88f17fa94\") " pod="openstack/nova-metadata-0" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.812541 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29a549a6-e4d5-41c6-8042-d6a88f17fa94-logs\") pod \"nova-metadata-0\" (UID: \"29a549a6-e4d5-41c6-8042-d6a88f17fa94\") " pod="openstack/nova-metadata-0" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.816440 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29a549a6-e4d5-41c6-8042-d6a88f17fa94-config-data\") pod \"nova-metadata-0\" (UID: \"29a549a6-e4d5-41c6-8042-d6a88f17fa94\") " pod="openstack/nova-metadata-0" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.816634 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29a549a6-e4d5-41c6-8042-d6a88f17fa94-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"29a549a6-e4d5-41c6-8042-d6a88f17fa94\") " pod="openstack/nova-metadata-0" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.819138 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/29a549a6-e4d5-41c6-8042-d6a88f17fa94-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"29a549a6-e4d5-41c6-8042-d6a88f17fa94\") " pod="openstack/nova-metadata-0" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.829095 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hhjm9\" (UniqueName: \"kubernetes.io/projected/29a549a6-e4d5-41c6-8042-d6a88f17fa94-kube-api-access-hhjm9\") pod \"nova-metadata-0\" (UID: \"29a549a6-e4d5-41c6-8042-d6a88f17fa94\") " pod="openstack/nova-metadata-0" Oct 11 05:10:18 crc kubenswrapper[4651]: I1011 05:10:18.839628 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 11 05:10:19 crc kubenswrapper[4651]: I1011 05:10:19.290249 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 11 05:10:19 crc kubenswrapper[4651]: W1011 05:10:19.295938 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod29a549a6_e4d5_41c6_8042_d6a88f17fa94.slice/crio-037ce715335264026e41bc2fbe5d05f41cbc7fc8c158cde22dd67a1f1c227123 WatchSource:0}: Error finding container 037ce715335264026e41bc2fbe5d05f41cbc7fc8c158cde22dd67a1f1c227123: Status 404 returned error can't find the container with id 037ce715335264026e41bc2fbe5d05f41cbc7fc8c158cde22dd67a1f1c227123 Oct 11 05:10:19 crc kubenswrapper[4651]: I1011 05:10:19.482480 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"29a549a6-e4d5-41c6-8042-d6a88f17fa94","Type":"ContainerStarted","Data":"069ff3e520fa9810a7879f49294ab184f3ea5b81ae4dfe4ac1e021c70b5be95d"} Oct 11 05:10:19 crc kubenswrapper[4651]: I1011 05:10:19.482881 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"29a549a6-e4d5-41c6-8042-d6a88f17fa94","Type":"ContainerStarted","Data":"037ce715335264026e41bc2fbe5d05f41cbc7fc8c158cde22dd67a1f1c227123"} Oct 11 05:10:19 crc kubenswrapper[4651]: I1011 05:10:19.485146 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"d2b4f087-b74d-4683-815d-35b6f7736f04","Type":"ContainerStarted","Data":"7cbaeb1d9388a8ab44eedf8b4d3052c888cb7b9e39533f29f1900fc2ece3f9f1"} Oct 11 05:10:19 crc kubenswrapper[4651]: I1011 05:10:19.485236 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Oct 11 05:10:19 crc kubenswrapper[4651]: I1011 05:10:19.489070 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="8432ef3d-65ef-4602-a0b6-d79d9ae9a73b" containerName="nova-scheduler-scheduler" containerID="cri-o://77e5112d038240ad7b4f0dee7a7dbf0208e5696fc759677bdb99b1eef7e5d1fc" gracePeriod=30 Oct 11 05:10:19 crc kubenswrapper[4651]: I1011 05:10:19.508097 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.508077527 podStartE2EDuration="2.508077527s" podCreationTimestamp="2025-10-11 05:10:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:10:19.503165364 +0000 UTC m=+1140.399398160" watchObservedRunningTime="2025-10-11 05:10:19.508077527 +0000 UTC m=+1140.404310323" Oct 11 05:10:19 crc kubenswrapper[4651]: I1011 05:10:19.886421 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec7e759f-da4e-4dcb-93dc-38007a6f35e5" path="/var/lib/kubelet/pods/ec7e759f-da4e-4dcb-93dc-38007a6f35e5/volumes" Oct 11 05:10:20 crc kubenswrapper[4651]: I1011 05:10:20.503058 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"29a549a6-e4d5-41c6-8042-d6a88f17fa94","Type":"ContainerStarted","Data":"d261e942118d66979e4108db8d86a64b96d0bb437acdfd1c0130c3a62af2635d"} Oct 11 05:10:20 crc kubenswrapper[4651]: I1011 05:10:20.524724 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.524704575 podStartE2EDuration="2.524704575s" 
podCreationTimestamp="2025-10-11 05:10:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:10:20.518190552 +0000 UTC m=+1141.414423368" watchObservedRunningTime="2025-10-11 05:10:20.524704575 +0000 UTC m=+1141.420937381" Oct 11 05:10:21 crc kubenswrapper[4651]: E1011 05:10:21.390100 4651 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="77e5112d038240ad7b4f0dee7a7dbf0208e5696fc759677bdb99b1eef7e5d1fc" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 11 05:10:21 crc kubenswrapper[4651]: E1011 05:10:21.392408 4651 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="77e5112d038240ad7b4f0dee7a7dbf0208e5696fc759677bdb99b1eef7e5d1fc" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 11 05:10:21 crc kubenswrapper[4651]: E1011 05:10:21.393739 4651 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="77e5112d038240ad7b4f0dee7a7dbf0208e5696fc759677bdb99b1eef7e5d1fc" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 11 05:10:21 crc kubenswrapper[4651]: E1011 05:10:21.393787 4651 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="8432ef3d-65ef-4602-a0b6-d79d9ae9a73b" containerName="nova-scheduler-scheduler" Oct 11 05:10:22 crc kubenswrapper[4651]: I1011 05:10:22.532623 4651 generic.go:334] "Generic (PLEG): container finished" podID="8432ef3d-65ef-4602-a0b6-d79d9ae9a73b" containerID="77e5112d038240ad7b4f0dee7a7dbf0208e5696fc759677bdb99b1eef7e5d1fc" exitCode=0 Oct 11 05:10:22 crc kubenswrapper[4651]: I1011 05:10:22.532710 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8432ef3d-65ef-4602-a0b6-d79d9ae9a73b","Type":"ContainerDied","Data":"77e5112d038240ad7b4f0dee7a7dbf0208e5696fc759677bdb99b1eef7e5d1fc"} Oct 11 05:10:22 crc kubenswrapper[4651]: I1011 05:10:22.532907 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8432ef3d-65ef-4602-a0b6-d79d9ae9a73b","Type":"ContainerDied","Data":"d1f56bb552021ca02540c943f0b23f33975b942c3207974195efa785a7149293"} Oct 11 05:10:22 crc kubenswrapper[4651]: I1011 05:10:22.532926 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d1f56bb552021ca02540c943f0b23f33975b942c3207974195efa785a7149293" Oct 11 05:10:22 crc kubenswrapper[4651]: I1011 05:10:22.617052 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 11 05:10:22 crc kubenswrapper[4651]: I1011 05:10:22.786174 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8432ef3d-65ef-4602-a0b6-d79d9ae9a73b-combined-ca-bundle\") pod \"8432ef3d-65ef-4602-a0b6-d79d9ae9a73b\" (UID: \"8432ef3d-65ef-4602-a0b6-d79d9ae9a73b\") " Oct 11 05:10:22 crc kubenswrapper[4651]: I1011 05:10:22.786289 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2wq47\" (UniqueName: \"kubernetes.io/projected/8432ef3d-65ef-4602-a0b6-d79d9ae9a73b-kube-api-access-2wq47\") pod \"8432ef3d-65ef-4602-a0b6-d79d9ae9a73b\" (UID: \"8432ef3d-65ef-4602-a0b6-d79d9ae9a73b\") " Oct 11 05:10:22 crc kubenswrapper[4651]: I1011 05:10:22.786380 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8432ef3d-65ef-4602-a0b6-d79d9ae9a73b-config-data\") pod \"8432ef3d-65ef-4602-a0b6-d79d9ae9a73b\" (UID: \"8432ef3d-65ef-4602-a0b6-d79d9ae9a73b\") " Oct 11 05:10:22 crc kubenswrapper[4651]: I1011 05:10:22.791887 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8432ef3d-65ef-4602-a0b6-d79d9ae9a73b-kube-api-access-2wq47" (OuterVolumeSpecName: "kube-api-access-2wq47") pod "8432ef3d-65ef-4602-a0b6-d79d9ae9a73b" (UID: "8432ef3d-65ef-4602-a0b6-d79d9ae9a73b"). InnerVolumeSpecName "kube-api-access-2wq47". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:10:22 crc kubenswrapper[4651]: I1011 05:10:22.824592 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8432ef3d-65ef-4602-a0b6-d79d9ae9a73b-config-data" (OuterVolumeSpecName: "config-data") pod "8432ef3d-65ef-4602-a0b6-d79d9ae9a73b" (UID: "8432ef3d-65ef-4602-a0b6-d79d9ae9a73b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:10:22 crc kubenswrapper[4651]: I1011 05:10:22.825089 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8432ef3d-65ef-4602-a0b6-d79d9ae9a73b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8432ef3d-65ef-4602-a0b6-d79d9ae9a73b" (UID: "8432ef3d-65ef-4602-a0b6-d79d9ae9a73b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:10:22 crc kubenswrapper[4651]: I1011 05:10:22.888351 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8432ef3d-65ef-4602-a0b6-d79d9ae9a73b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:22 crc kubenswrapper[4651]: I1011 05:10:22.888383 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2wq47\" (UniqueName: \"kubernetes.io/projected/8432ef3d-65ef-4602-a0b6-d79d9ae9a73b-kube-api-access-2wq47\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:22 crc kubenswrapper[4651]: I1011 05:10:22.888394 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8432ef3d-65ef-4602-a0b6-d79d9ae9a73b-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.498655 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.543891 4651 generic.go:334] "Generic (PLEG): container finished" podID="8a5d8d3c-139c-4d8b-94ec-38316a4534d3" containerID="ba84dac6a4e13cc66539d24b28b518fdaa622f6434cd3e3f606907765d7286d9" exitCode=0 Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.543959 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.543961 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8a5d8d3c-139c-4d8b-94ec-38316a4534d3","Type":"ContainerDied","Data":"ba84dac6a4e13cc66539d24b28b518fdaa622f6434cd3e3f606907765d7286d9"} Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.543997 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8a5d8d3c-139c-4d8b-94ec-38316a4534d3","Type":"ContainerDied","Data":"bfe7e0694abab1761a2605212c7cebcb64e3a7c4ca67247ec7bc74b509a1ebcf"} Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.544017 4651 scope.go:117] "RemoveContainer" containerID="ba84dac6a4e13cc66539d24b28b518fdaa622f6434cd3e3f606907765d7286d9" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.543961 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.575370 4651 scope.go:117] "RemoveContainer" containerID="6a98d8d406b9633cc15a7fd907e9f68ce3d2f462fd619613d209a2cd067cf200" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.616865 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.624957 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.627711 4651 scope.go:117] "RemoveContainer" containerID="ba84dac6a4e13cc66539d24b28b518fdaa622f6434cd3e3f606907765d7286d9" Oct 11 05:10:23 crc kubenswrapper[4651]: E1011 05:10:23.628267 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba84dac6a4e13cc66539d24b28b518fdaa622f6434cd3e3f606907765d7286d9\": container with ID starting with ba84dac6a4e13cc66539d24b28b518fdaa622f6434cd3e3f606907765d7286d9 not found: ID does not exist" containerID="ba84dac6a4e13cc66539d24b28b518fdaa622f6434cd3e3f606907765d7286d9" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.628310 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba84dac6a4e13cc66539d24b28b518fdaa622f6434cd3e3f606907765d7286d9"} err="failed to get container status \"ba84dac6a4e13cc66539d24b28b518fdaa622f6434cd3e3f606907765d7286d9\": rpc error: code = NotFound desc = could not find container \"ba84dac6a4e13cc66539d24b28b518fdaa622f6434cd3e3f606907765d7286d9\": container with ID starting with ba84dac6a4e13cc66539d24b28b518fdaa622f6434cd3e3f606907765d7286d9 not found: ID does not exist" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.628337 4651 scope.go:117] "RemoveContainer" containerID="6a98d8d406b9633cc15a7fd907e9f68ce3d2f462fd619613d209a2cd067cf200" Oct 11 05:10:23 crc kubenswrapper[4651]: E1011 05:10:23.628595 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"6a98d8d406b9633cc15a7fd907e9f68ce3d2f462fd619613d209a2cd067cf200\": container with ID starting with 6a98d8d406b9633cc15a7fd907e9f68ce3d2f462fd619613d209a2cd067cf200 not found: ID does not exist" containerID="6a98d8d406b9633cc15a7fd907e9f68ce3d2f462fd619613d209a2cd067cf200" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.628640 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a98d8d406b9633cc15a7fd907e9f68ce3d2f462fd619613d209a2cd067cf200"} err="failed to get container status \"6a98d8d406b9633cc15a7fd907e9f68ce3d2f462fd619613d209a2cd067cf200\": rpc error: code = NotFound desc = could not find container \"6a98d8d406b9633cc15a7fd907e9f68ce3d2f462fd619613d209a2cd067cf200\": container with ID starting with 6a98d8d406b9633cc15a7fd907e9f68ce3d2f462fd619613d209a2cd067cf200 not found: ID does not exist" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.631728 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 11 05:10:23 crc kubenswrapper[4651]: E1011 05:10:23.632129 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a5d8d3c-139c-4d8b-94ec-38316a4534d3" containerName="nova-api-log" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.632145 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a5d8d3c-139c-4d8b-94ec-38316a4534d3" containerName="nova-api-log" Oct 11 05:10:23 crc kubenswrapper[4651]: E1011 05:10:23.632157 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a5d8d3c-139c-4d8b-94ec-38316a4534d3" containerName="nova-api-api" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.632163 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a5d8d3c-139c-4d8b-94ec-38316a4534d3" containerName="nova-api-api" Oct 11 05:10:23 crc kubenswrapper[4651]: E1011 05:10:23.632186 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8432ef3d-65ef-4602-a0b6-d79d9ae9a73b" containerName="nova-scheduler-scheduler" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.632192 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="8432ef3d-65ef-4602-a0b6-d79d9ae9a73b" containerName="nova-scheduler-scheduler" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.632395 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="8432ef3d-65ef-4602-a0b6-d79d9ae9a73b" containerName="nova-scheduler-scheduler" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.632415 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a5d8d3c-139c-4d8b-94ec-38316a4534d3" containerName="nova-api-api" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.632426 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a5d8d3c-139c-4d8b-94ec-38316a4534d3" containerName="nova-api-log" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.633268 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.635036 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.641201 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.707397 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a5d8d3c-139c-4d8b-94ec-38316a4534d3-config-data\") pod \"8a5d8d3c-139c-4d8b-94ec-38316a4534d3\" (UID: \"8a5d8d3c-139c-4d8b-94ec-38316a4534d3\") " Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.707434 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8a5d8d3c-139c-4d8b-94ec-38316a4534d3-logs\") pod \"8a5d8d3c-139c-4d8b-94ec-38316a4534d3\" (UID: \"8a5d8d3c-139c-4d8b-94ec-38316a4534d3\") " Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.707475 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a5d8d3c-139c-4d8b-94ec-38316a4534d3-combined-ca-bundle\") pod \"8a5d8d3c-139c-4d8b-94ec-38316a4534d3\" (UID: \"8a5d8d3c-139c-4d8b-94ec-38316a4534d3\") " Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.707646 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pwmhg\" (UniqueName: \"kubernetes.io/projected/8a5d8d3c-139c-4d8b-94ec-38316a4534d3-kube-api-access-pwmhg\") pod \"8a5d8d3c-139c-4d8b-94ec-38316a4534d3\" (UID: \"8a5d8d3c-139c-4d8b-94ec-38316a4534d3\") " Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.708012 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8a5d8d3c-139c-4d8b-94ec-38316a4534d3-logs" (OuterVolumeSpecName: "logs") pod "8a5d8d3c-139c-4d8b-94ec-38316a4534d3" (UID: "8a5d8d3c-139c-4d8b-94ec-38316a4534d3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.711912 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a5d8d3c-139c-4d8b-94ec-38316a4534d3-kube-api-access-pwmhg" (OuterVolumeSpecName: "kube-api-access-pwmhg") pod "8a5d8d3c-139c-4d8b-94ec-38316a4534d3" (UID: "8a5d8d3c-139c-4d8b-94ec-38316a4534d3"). InnerVolumeSpecName "kube-api-access-pwmhg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.731916 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a5d8d3c-139c-4d8b-94ec-38316a4534d3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8a5d8d3c-139c-4d8b-94ec-38316a4534d3" (UID: "8a5d8d3c-139c-4d8b-94ec-38316a4534d3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.735328 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a5d8d3c-139c-4d8b-94ec-38316a4534d3-config-data" (OuterVolumeSpecName: "config-data") pod "8a5d8d3c-139c-4d8b-94ec-38316a4534d3" (UID: "8a5d8d3c-139c-4d8b-94ec-38316a4534d3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.809963 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pnjfz\" (UniqueName: \"kubernetes.io/projected/df04f80e-387c-40c8-a293-97cf7d3f4c79-kube-api-access-pnjfz\") pod \"nova-scheduler-0\" (UID: \"df04f80e-387c-40c8-a293-97cf7d3f4c79\") " pod="openstack/nova-scheduler-0" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.810127 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df04f80e-387c-40c8-a293-97cf7d3f4c79-config-data\") pod \"nova-scheduler-0\" (UID: \"df04f80e-387c-40c8-a293-97cf7d3f4c79\") " pod="openstack/nova-scheduler-0" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.810187 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df04f80e-387c-40c8-a293-97cf7d3f4c79-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"df04f80e-387c-40c8-a293-97cf7d3f4c79\") " pod="openstack/nova-scheduler-0" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.811026 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a5d8d3c-139c-4d8b-94ec-38316a4534d3-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.811061 4651 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8a5d8d3c-139c-4d8b-94ec-38316a4534d3-logs\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.811071 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a5d8d3c-139c-4d8b-94ec-38316a4534d3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.811082 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pwmhg\" (UniqueName: \"kubernetes.io/projected/8a5d8d3c-139c-4d8b-94ec-38316a4534d3-kube-api-access-pwmhg\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.840566 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.840887 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.892149 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8432ef3d-65ef-4602-a0b6-d79d9ae9a73b" path="/var/lib/kubelet/pods/8432ef3d-65ef-4602-a0b6-d79d9ae9a73b/volumes" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.892862 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.894085 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.902598 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.904455 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.908022 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.918526 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df04f80e-387c-40c8-a293-97cf7d3f4c79-config-data\") pod \"nova-scheduler-0\" (UID: \"df04f80e-387c-40c8-a293-97cf7d3f4c79\") " pod="openstack/nova-scheduler-0" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.918783 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df04f80e-387c-40c8-a293-97cf7d3f4c79-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"df04f80e-387c-40c8-a293-97cf7d3f4c79\") " pod="openstack/nova-scheduler-0" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.919079 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnjfz\" (UniqueName: \"kubernetes.io/projected/df04f80e-387c-40c8-a293-97cf7d3f4c79-kube-api-access-pnjfz\") pod \"nova-scheduler-0\" (UID: \"df04f80e-387c-40c8-a293-97cf7d3f4c79\") " pod="openstack/nova-scheduler-0" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.922981 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df04f80e-387c-40c8-a293-97cf7d3f4c79-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"df04f80e-387c-40c8-a293-97cf7d3f4c79\") " pod="openstack/nova-scheduler-0" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.923467 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df04f80e-387c-40c8-a293-97cf7d3f4c79-config-data\") pod \"nova-scheduler-0\" (UID: \"df04f80e-387c-40c8-a293-97cf7d3f4c79\") " pod="openstack/nova-scheduler-0" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.930335 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.936267 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pnjfz\" (UniqueName: \"kubernetes.io/projected/df04f80e-387c-40c8-a293-97cf7d3f4c79-kube-api-access-pnjfz\") pod \"nova-scheduler-0\" (UID: \"df04f80e-387c-40c8-a293-97cf7d3f4c79\") " pod="openstack/nova-scheduler-0" Oct 11 05:10:23 crc kubenswrapper[4651]: I1011 05:10:23.955557 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 11 05:10:24 crc kubenswrapper[4651]: I1011 05:10:24.020600 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b408a1bd-6065-42a6-bf31-eebef8ad3452-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b408a1bd-6065-42a6-bf31-eebef8ad3452\") " pod="openstack/nova-api-0" Oct 11 05:10:24 crc kubenswrapper[4651]: I1011 05:10:24.020997 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b408a1bd-6065-42a6-bf31-eebef8ad3452-config-data\") pod \"nova-api-0\" (UID: \"b408a1bd-6065-42a6-bf31-eebef8ad3452\") " pod="openstack/nova-api-0" Oct 11 05:10:24 crc kubenswrapper[4651]: I1011 05:10:24.021064 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rl727\" (UniqueName: \"kubernetes.io/projected/b408a1bd-6065-42a6-bf31-eebef8ad3452-kube-api-access-rl727\") pod \"nova-api-0\" (UID: \"b408a1bd-6065-42a6-bf31-eebef8ad3452\") " pod="openstack/nova-api-0" Oct 11 05:10:24 crc kubenswrapper[4651]: I1011 05:10:24.021799 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b408a1bd-6065-42a6-bf31-eebef8ad3452-logs\") pod \"nova-api-0\" (UID: \"b408a1bd-6065-42a6-bf31-eebef8ad3452\") " pod="openstack/nova-api-0" Oct 11 05:10:24 crc kubenswrapper[4651]: I1011 05:10:24.123206 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b408a1bd-6065-42a6-bf31-eebef8ad3452-config-data\") pod \"nova-api-0\" (UID: \"b408a1bd-6065-42a6-bf31-eebef8ad3452\") " pod="openstack/nova-api-0" Oct 11 05:10:24 crc kubenswrapper[4651]: I1011 05:10:24.123288 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rl727\" (UniqueName: \"kubernetes.io/projected/b408a1bd-6065-42a6-bf31-eebef8ad3452-kube-api-access-rl727\") pod \"nova-api-0\" (UID: \"b408a1bd-6065-42a6-bf31-eebef8ad3452\") " pod="openstack/nova-api-0" Oct 11 05:10:24 crc kubenswrapper[4651]: I1011 05:10:24.123406 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b408a1bd-6065-42a6-bf31-eebef8ad3452-logs\") pod \"nova-api-0\" (UID: \"b408a1bd-6065-42a6-bf31-eebef8ad3452\") " pod="openstack/nova-api-0" Oct 11 05:10:24 crc kubenswrapper[4651]: I1011 05:10:24.123488 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b408a1bd-6065-42a6-bf31-eebef8ad3452-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b408a1bd-6065-42a6-bf31-eebef8ad3452\") " pod="openstack/nova-api-0" Oct 11 05:10:24 crc kubenswrapper[4651]: I1011 05:10:24.123945 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b408a1bd-6065-42a6-bf31-eebef8ad3452-logs\") pod \"nova-api-0\" (UID: \"b408a1bd-6065-42a6-bf31-eebef8ad3452\") " pod="openstack/nova-api-0" Oct 11 05:10:24 crc kubenswrapper[4651]: I1011 05:10:24.129579 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b408a1bd-6065-42a6-bf31-eebef8ad3452-config-data\") pod \"nova-api-0\" (UID: 
\"b408a1bd-6065-42a6-bf31-eebef8ad3452\") " pod="openstack/nova-api-0" Oct 11 05:10:24 crc kubenswrapper[4651]: I1011 05:10:24.129656 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b408a1bd-6065-42a6-bf31-eebef8ad3452-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b408a1bd-6065-42a6-bf31-eebef8ad3452\") " pod="openstack/nova-api-0" Oct 11 05:10:24 crc kubenswrapper[4651]: I1011 05:10:24.143644 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rl727\" (UniqueName: \"kubernetes.io/projected/b408a1bd-6065-42a6-bf31-eebef8ad3452-kube-api-access-rl727\") pod \"nova-api-0\" (UID: \"b408a1bd-6065-42a6-bf31-eebef8ad3452\") " pod="openstack/nova-api-0" Oct 11 05:10:24 crc kubenswrapper[4651]: I1011 05:10:24.235446 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 11 05:10:24 crc kubenswrapper[4651]: I1011 05:10:24.412578 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 11 05:10:24 crc kubenswrapper[4651]: I1011 05:10:24.555753 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"df04f80e-387c-40c8-a293-97cf7d3f4c79","Type":"ContainerStarted","Data":"bda0e32b9cbeb3457310dc04dd68d4b4a2ac2678c2a966a1e3eb31f2ab85a05f"} Oct 11 05:10:24 crc kubenswrapper[4651]: I1011 05:10:24.642271 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 11 05:10:25 crc kubenswrapper[4651]: I1011 05:10:25.565628 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"df04f80e-387c-40c8-a293-97cf7d3f4c79","Type":"ContainerStarted","Data":"531973352bb051d3570df2038989aef5dc001e82e263fce7d939afcff1e3e6c7"} Oct 11 05:10:25 crc kubenswrapper[4651]: I1011 05:10:25.567492 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b408a1bd-6065-42a6-bf31-eebef8ad3452","Type":"ContainerStarted","Data":"f0b28a78eb28df817bee1c747a80b9adc90d39337615f1fa89d1ed491b1d3293"} Oct 11 05:10:25 crc kubenswrapper[4651]: I1011 05:10:25.567575 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b408a1bd-6065-42a6-bf31-eebef8ad3452","Type":"ContainerStarted","Data":"912b659cdc950dcf0e19edd694bfe4dd9bf7f6191b9d307bf0b8d781734b7383"} Oct 11 05:10:25 crc kubenswrapper[4651]: I1011 05:10:25.567603 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b408a1bd-6065-42a6-bf31-eebef8ad3452","Type":"ContainerStarted","Data":"dfbd19b00304cd514f643d0bda6a1d0eaa6ed405fd6a061696c5c8d0bd0aed2a"} Oct 11 05:10:25 crc kubenswrapper[4651]: I1011 05:10:25.597354 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.596700184 podStartE2EDuration="2.596700184s" podCreationTimestamp="2025-10-11 05:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:10:25.580408498 +0000 UTC m=+1146.476641304" watchObservedRunningTime="2025-10-11 05:10:25.596700184 +0000 UTC m=+1146.492932980" Oct 11 05:10:25 crc kubenswrapper[4651]: I1011 05:10:25.610813 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.610787545 podStartE2EDuration="2.610787545s" 
podCreationTimestamp="2025-10-11 05:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:10:25.603840582 +0000 UTC m=+1146.500073388" watchObservedRunningTime="2025-10-11 05:10:25.610787545 +0000 UTC m=+1146.507020371" Oct 11 05:10:25 crc kubenswrapper[4651]: I1011 05:10:25.897854 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a5d8d3c-139c-4d8b-94ec-38316a4534d3" path="/var/lib/kubelet/pods/8a5d8d3c-139c-4d8b-94ec-38316a4534d3/volumes" Oct 11 05:10:27 crc kubenswrapper[4651]: I1011 05:10:27.957540 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Oct 11 05:10:28 crc kubenswrapper[4651]: I1011 05:10:28.841457 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 11 05:10:28 crc kubenswrapper[4651]: I1011 05:10:28.841539 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 11 05:10:28 crc kubenswrapper[4651]: I1011 05:10:28.956663 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 11 05:10:29 crc kubenswrapper[4651]: I1011 05:10:29.853994 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="29a549a6-e4d5-41c6-8042-d6a88f17fa94" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.198:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 11 05:10:29 crc kubenswrapper[4651]: I1011 05:10:29.854010 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="29a549a6-e4d5-41c6-8042-d6a88f17fa94" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.198:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 11 05:10:33 crc kubenswrapper[4651]: I1011 05:10:33.956398 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 11 05:10:34 crc kubenswrapper[4651]: I1011 05:10:34.004018 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 11 05:10:34 crc kubenswrapper[4651]: I1011 05:10:34.236455 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 11 05:10:34 crc kubenswrapper[4651]: I1011 05:10:34.236551 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 11 05:10:34 crc kubenswrapper[4651]: I1011 05:10:34.753295 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 11 05:10:35 crc kubenswrapper[4651]: I1011 05:10:35.278152 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b408a1bd-6065-42a6-bf31-eebef8ad3452" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.200:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 11 05:10:35 crc kubenswrapper[4651]: I1011 05:10:35.278454 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b408a1bd-6065-42a6-bf31-eebef8ad3452" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.200:8774/\": context deadline exceeded (Client.Timeout exceeded while 
awaiting headers)" Oct 11 05:10:38 crc kubenswrapper[4651]: I1011 05:10:38.846429 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 11 05:10:38 crc kubenswrapper[4651]: I1011 05:10:38.848502 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 11 05:10:38 crc kubenswrapper[4651]: I1011 05:10:38.858461 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 11 05:10:39 crc kubenswrapper[4651]: I1011 05:10:39.768793 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 11 05:10:41 crc kubenswrapper[4651]: I1011 05:10:41.682158 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 11 05:10:41 crc kubenswrapper[4651]: I1011 05:10:41.758966 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmzvg\" (UniqueName: \"kubernetes.io/projected/bce169c0-d36f-4fd1-b42f-9e2b481789b1-kube-api-access-dmzvg\") pod \"bce169c0-d36f-4fd1-b42f-9e2b481789b1\" (UID: \"bce169c0-d36f-4fd1-b42f-9e2b481789b1\") " Oct 11 05:10:41 crc kubenswrapper[4651]: I1011 05:10:41.759583 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bce169c0-d36f-4fd1-b42f-9e2b481789b1-config-data\") pod \"bce169c0-d36f-4fd1-b42f-9e2b481789b1\" (UID: \"bce169c0-d36f-4fd1-b42f-9e2b481789b1\") " Oct 11 05:10:41 crc kubenswrapper[4651]: I1011 05:10:41.759652 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bce169c0-d36f-4fd1-b42f-9e2b481789b1-combined-ca-bundle\") pod \"bce169c0-d36f-4fd1-b42f-9e2b481789b1\" (UID: \"bce169c0-d36f-4fd1-b42f-9e2b481789b1\") " Oct 11 05:10:41 crc kubenswrapper[4651]: I1011 05:10:41.766769 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bce169c0-d36f-4fd1-b42f-9e2b481789b1-kube-api-access-dmzvg" (OuterVolumeSpecName: "kube-api-access-dmzvg") pod "bce169c0-d36f-4fd1-b42f-9e2b481789b1" (UID: "bce169c0-d36f-4fd1-b42f-9e2b481789b1"). InnerVolumeSpecName "kube-api-access-dmzvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:10:41 crc kubenswrapper[4651]: I1011 05:10:41.782103 4651 generic.go:334] "Generic (PLEG): container finished" podID="bce169c0-d36f-4fd1-b42f-9e2b481789b1" containerID="3a3faff201d518822a82a72637f86b67c2e0f1f60f2c87badb99768d9390b5ec" exitCode=137 Oct 11 05:10:41 crc kubenswrapper[4651]: I1011 05:10:41.782215 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"bce169c0-d36f-4fd1-b42f-9e2b481789b1","Type":"ContainerDied","Data":"3a3faff201d518822a82a72637f86b67c2e0f1f60f2c87badb99768d9390b5ec"} Oct 11 05:10:41 crc kubenswrapper[4651]: I1011 05:10:41.782246 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 11 05:10:41 crc kubenswrapper[4651]: I1011 05:10:41.782372 4651 scope.go:117] "RemoveContainer" containerID="3a3faff201d518822a82a72637f86b67c2e0f1f60f2c87badb99768d9390b5ec" Oct 11 05:10:41 crc kubenswrapper[4651]: I1011 05:10:41.782343 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"bce169c0-d36f-4fd1-b42f-9e2b481789b1","Type":"ContainerDied","Data":"89df9807363cf0b8af55d3063e00cf3dd65f618be0ef5da3e2259c5a65fb2dd5"} Oct 11 05:10:41 crc kubenswrapper[4651]: I1011 05:10:41.806861 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bce169c0-d36f-4fd1-b42f-9e2b481789b1-config-data" (OuterVolumeSpecName: "config-data") pod "bce169c0-d36f-4fd1-b42f-9e2b481789b1" (UID: "bce169c0-d36f-4fd1-b42f-9e2b481789b1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:10:41 crc kubenswrapper[4651]: I1011 05:10:41.808099 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bce169c0-d36f-4fd1-b42f-9e2b481789b1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bce169c0-d36f-4fd1-b42f-9e2b481789b1" (UID: "bce169c0-d36f-4fd1-b42f-9e2b481789b1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:10:41 crc kubenswrapper[4651]: I1011 05:10:41.863111 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmzvg\" (UniqueName: \"kubernetes.io/projected/bce169c0-d36f-4fd1-b42f-9e2b481789b1-kube-api-access-dmzvg\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:41 crc kubenswrapper[4651]: I1011 05:10:41.863149 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bce169c0-d36f-4fd1-b42f-9e2b481789b1-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:41 crc kubenswrapper[4651]: I1011 05:10:41.863162 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bce169c0-d36f-4fd1-b42f-9e2b481789b1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:41 crc kubenswrapper[4651]: I1011 05:10:41.873197 4651 scope.go:117] "RemoveContainer" containerID="3a3faff201d518822a82a72637f86b67c2e0f1f60f2c87badb99768d9390b5ec" Oct 11 05:10:41 crc kubenswrapper[4651]: E1011 05:10:41.873910 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a3faff201d518822a82a72637f86b67c2e0f1f60f2c87badb99768d9390b5ec\": container with ID starting with 3a3faff201d518822a82a72637f86b67c2e0f1f60f2c87badb99768d9390b5ec not found: ID does not exist" containerID="3a3faff201d518822a82a72637f86b67c2e0f1f60f2c87badb99768d9390b5ec" Oct 11 05:10:41 crc kubenswrapper[4651]: I1011 05:10:41.873961 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a3faff201d518822a82a72637f86b67c2e0f1f60f2c87badb99768d9390b5ec"} err="failed to get container status \"3a3faff201d518822a82a72637f86b67c2e0f1f60f2c87badb99768d9390b5ec\": rpc error: code = NotFound desc = could not find container \"3a3faff201d518822a82a72637f86b67c2e0f1f60f2c87badb99768d9390b5ec\": container with ID starting with 3a3faff201d518822a82a72637f86b67c2e0f1f60f2c87badb99768d9390b5ec not found: ID does not exist" Oct 11 05:10:42 crc kubenswrapper[4651]: I1011 05:10:42.116152 4651 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 11 05:10:42 crc kubenswrapper[4651]: I1011 05:10:42.133386 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 11 05:10:42 crc kubenswrapper[4651]: I1011 05:10:42.143315 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 11 05:10:42 crc kubenswrapper[4651]: E1011 05:10:42.144017 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bce169c0-d36f-4fd1-b42f-9e2b481789b1" containerName="nova-cell1-novncproxy-novncproxy" Oct 11 05:10:42 crc kubenswrapper[4651]: I1011 05:10:42.144052 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="bce169c0-d36f-4fd1-b42f-9e2b481789b1" containerName="nova-cell1-novncproxy-novncproxy" Oct 11 05:10:42 crc kubenswrapper[4651]: I1011 05:10:42.144430 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="bce169c0-d36f-4fd1-b42f-9e2b481789b1" containerName="nova-cell1-novncproxy-novncproxy" Oct 11 05:10:42 crc kubenswrapper[4651]: I1011 05:10:42.145437 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 11 05:10:42 crc kubenswrapper[4651]: I1011 05:10:42.149510 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Oct 11 05:10:42 crc kubenswrapper[4651]: I1011 05:10:42.149686 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Oct 11 05:10:42 crc kubenswrapper[4651]: I1011 05:10:42.152465 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 11 05:10:42 crc kubenswrapper[4651]: I1011 05:10:42.152578 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 11 05:10:42 crc kubenswrapper[4651]: I1011 05:10:42.170007 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d986aa9a-c031-4a99-b2d8-6c09be2fc264-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"d986aa9a-c031-4a99-b2d8-6c09be2fc264\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 05:10:42 crc kubenswrapper[4651]: I1011 05:10:42.170095 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/d986aa9a-c031-4a99-b2d8-6c09be2fc264-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"d986aa9a-c031-4a99-b2d8-6c09be2fc264\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 05:10:42 crc kubenswrapper[4651]: I1011 05:10:42.170780 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d986aa9a-c031-4a99-b2d8-6c09be2fc264-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"d986aa9a-c031-4a99-b2d8-6c09be2fc264\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 05:10:42 crc kubenswrapper[4651]: I1011 05:10:42.171039 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5kts4\" (UniqueName: \"kubernetes.io/projected/d986aa9a-c031-4a99-b2d8-6c09be2fc264-kube-api-access-5kts4\") pod \"nova-cell1-novncproxy-0\" (UID: \"d986aa9a-c031-4a99-b2d8-6c09be2fc264\") " 
pod="openstack/nova-cell1-novncproxy-0" Oct 11 05:10:42 crc kubenswrapper[4651]: I1011 05:10:42.171457 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/d986aa9a-c031-4a99-b2d8-6c09be2fc264-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"d986aa9a-c031-4a99-b2d8-6c09be2fc264\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 05:10:42 crc kubenswrapper[4651]: I1011 05:10:42.272062 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/d986aa9a-c031-4a99-b2d8-6c09be2fc264-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"d986aa9a-c031-4a99-b2d8-6c09be2fc264\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 05:10:42 crc kubenswrapper[4651]: I1011 05:10:42.272112 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d986aa9a-c031-4a99-b2d8-6c09be2fc264-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"d986aa9a-c031-4a99-b2d8-6c09be2fc264\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 05:10:42 crc kubenswrapper[4651]: I1011 05:10:42.272168 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5kts4\" (UniqueName: \"kubernetes.io/projected/d986aa9a-c031-4a99-b2d8-6c09be2fc264-kube-api-access-5kts4\") pod \"nova-cell1-novncproxy-0\" (UID: \"d986aa9a-c031-4a99-b2d8-6c09be2fc264\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 05:10:42 crc kubenswrapper[4651]: I1011 05:10:42.272196 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/d986aa9a-c031-4a99-b2d8-6c09be2fc264-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"d986aa9a-c031-4a99-b2d8-6c09be2fc264\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 05:10:42 crc kubenswrapper[4651]: I1011 05:10:42.272239 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d986aa9a-c031-4a99-b2d8-6c09be2fc264-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"d986aa9a-c031-4a99-b2d8-6c09be2fc264\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 05:10:42 crc kubenswrapper[4651]: I1011 05:10:42.276690 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/d986aa9a-c031-4a99-b2d8-6c09be2fc264-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"d986aa9a-c031-4a99-b2d8-6c09be2fc264\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 05:10:42 crc kubenswrapper[4651]: I1011 05:10:42.276839 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d986aa9a-c031-4a99-b2d8-6c09be2fc264-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"d986aa9a-c031-4a99-b2d8-6c09be2fc264\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 05:10:42 crc kubenswrapper[4651]: I1011 05:10:42.277426 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d986aa9a-c031-4a99-b2d8-6c09be2fc264-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"d986aa9a-c031-4a99-b2d8-6c09be2fc264\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 05:10:42 crc kubenswrapper[4651]: I1011 05:10:42.284440 4651 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/d986aa9a-c031-4a99-b2d8-6c09be2fc264-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"d986aa9a-c031-4a99-b2d8-6c09be2fc264\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 05:10:42 crc kubenswrapper[4651]: I1011 05:10:42.290971 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5kts4\" (UniqueName: \"kubernetes.io/projected/d986aa9a-c031-4a99-b2d8-6c09be2fc264-kube-api-access-5kts4\") pod \"nova-cell1-novncproxy-0\" (UID: \"d986aa9a-c031-4a99-b2d8-6c09be2fc264\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 05:10:42 crc kubenswrapper[4651]: I1011 05:10:42.476112 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 11 05:10:42 crc kubenswrapper[4651]: I1011 05:10:42.969621 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 11 05:10:43 crc kubenswrapper[4651]: I1011 05:10:43.812082 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"d986aa9a-c031-4a99-b2d8-6c09be2fc264","Type":"ContainerStarted","Data":"d5be15e95b9aabc7acdf6119dbc1c87869827727fbb3e21bcce745abdeb36d79"} Oct 11 05:10:43 crc kubenswrapper[4651]: I1011 05:10:43.812365 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"d986aa9a-c031-4a99-b2d8-6c09be2fc264","Type":"ContainerStarted","Data":"fefc3ebf20d9812c1470b812b80ead7e00eded08b8cbe3557ef0036b9fc5a08c"} Oct 11 05:10:43 crc kubenswrapper[4651]: I1011 05:10:43.842079 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=1.842063103 podStartE2EDuration="1.842063103s" podCreationTimestamp="2025-10-11 05:10:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:10:43.828370752 +0000 UTC m=+1164.724603578" watchObservedRunningTime="2025-10-11 05:10:43.842063103 +0000 UTC m=+1164.738295899" Oct 11 05:10:43 crc kubenswrapper[4651]: I1011 05:10:43.881252 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bce169c0-d36f-4fd1-b42f-9e2b481789b1" path="/var/lib/kubelet/pods/bce169c0-d36f-4fd1-b42f-9e2b481789b1/volumes" Oct 11 05:10:44 crc kubenswrapper[4651]: I1011 05:10:44.239765 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 11 05:10:44 crc kubenswrapper[4651]: I1011 05:10:44.239962 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 11 05:10:44 crc kubenswrapper[4651]: I1011 05:10:44.240469 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 11 05:10:44 crc kubenswrapper[4651]: I1011 05:10:44.240505 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 11 05:10:44 crc kubenswrapper[4651]: I1011 05:10:44.242868 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 11 05:10:44 crc kubenswrapper[4651]: I1011 05:10:44.243800 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 11 05:10:44 crc kubenswrapper[4651]: I1011 05:10:44.474190 4651 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-twwgp"] Oct 11 05:10:44 crc kubenswrapper[4651]: I1011 05:10:44.476311 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-twwgp" Oct 11 05:10:44 crc kubenswrapper[4651]: I1011 05:10:44.500965 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-twwgp"] Oct 11 05:10:44 crc kubenswrapper[4651]: I1011 05:10:44.531295 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/07814860-7a0a-48b8-996c-3472c44897f3-ovsdbserver-nb\") pod \"dnsmasq-dns-5c7b6c5df9-twwgp\" (UID: \"07814860-7a0a-48b8-996c-3472c44897f3\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-twwgp" Oct 11 05:10:44 crc kubenswrapper[4651]: I1011 05:10:44.531341 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07814860-7a0a-48b8-996c-3472c44897f3-config\") pod \"dnsmasq-dns-5c7b6c5df9-twwgp\" (UID: \"07814860-7a0a-48b8-996c-3472c44897f3\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-twwgp" Oct 11 05:10:44 crc kubenswrapper[4651]: I1011 05:10:44.531385 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jlrz\" (UniqueName: \"kubernetes.io/projected/07814860-7a0a-48b8-996c-3472c44897f3-kube-api-access-5jlrz\") pod \"dnsmasq-dns-5c7b6c5df9-twwgp\" (UID: \"07814860-7a0a-48b8-996c-3472c44897f3\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-twwgp" Oct 11 05:10:44 crc kubenswrapper[4651]: I1011 05:10:44.531409 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/07814860-7a0a-48b8-996c-3472c44897f3-ovsdbserver-sb\") pod \"dnsmasq-dns-5c7b6c5df9-twwgp\" (UID: \"07814860-7a0a-48b8-996c-3472c44897f3\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-twwgp" Oct 11 05:10:44 crc kubenswrapper[4651]: I1011 05:10:44.531468 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/07814860-7a0a-48b8-996c-3472c44897f3-dns-svc\") pod \"dnsmasq-dns-5c7b6c5df9-twwgp\" (UID: \"07814860-7a0a-48b8-996c-3472c44897f3\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-twwgp" Oct 11 05:10:44 crc kubenswrapper[4651]: I1011 05:10:44.531499 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/07814860-7a0a-48b8-996c-3472c44897f3-dns-swift-storage-0\") pod \"dnsmasq-dns-5c7b6c5df9-twwgp\" (UID: \"07814860-7a0a-48b8-996c-3472c44897f3\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-twwgp" Oct 11 05:10:44 crc kubenswrapper[4651]: I1011 05:10:44.632282 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/07814860-7a0a-48b8-996c-3472c44897f3-ovsdbserver-nb\") pod \"dnsmasq-dns-5c7b6c5df9-twwgp\" (UID: \"07814860-7a0a-48b8-996c-3472c44897f3\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-twwgp" Oct 11 05:10:44 crc kubenswrapper[4651]: I1011 05:10:44.632320 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07814860-7a0a-48b8-996c-3472c44897f3-config\") pod \"dnsmasq-dns-5c7b6c5df9-twwgp\" (UID: 
\"07814860-7a0a-48b8-996c-3472c44897f3\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-twwgp" Oct 11 05:10:44 crc kubenswrapper[4651]: I1011 05:10:44.632360 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jlrz\" (UniqueName: \"kubernetes.io/projected/07814860-7a0a-48b8-996c-3472c44897f3-kube-api-access-5jlrz\") pod \"dnsmasq-dns-5c7b6c5df9-twwgp\" (UID: \"07814860-7a0a-48b8-996c-3472c44897f3\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-twwgp" Oct 11 05:10:44 crc kubenswrapper[4651]: I1011 05:10:44.632381 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/07814860-7a0a-48b8-996c-3472c44897f3-ovsdbserver-sb\") pod \"dnsmasq-dns-5c7b6c5df9-twwgp\" (UID: \"07814860-7a0a-48b8-996c-3472c44897f3\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-twwgp" Oct 11 05:10:44 crc kubenswrapper[4651]: I1011 05:10:44.632438 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/07814860-7a0a-48b8-996c-3472c44897f3-dns-svc\") pod \"dnsmasq-dns-5c7b6c5df9-twwgp\" (UID: \"07814860-7a0a-48b8-996c-3472c44897f3\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-twwgp" Oct 11 05:10:44 crc kubenswrapper[4651]: I1011 05:10:44.632466 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/07814860-7a0a-48b8-996c-3472c44897f3-dns-swift-storage-0\") pod \"dnsmasq-dns-5c7b6c5df9-twwgp\" (UID: \"07814860-7a0a-48b8-996c-3472c44897f3\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-twwgp" Oct 11 05:10:44 crc kubenswrapper[4651]: I1011 05:10:44.633448 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07814860-7a0a-48b8-996c-3472c44897f3-config\") pod \"dnsmasq-dns-5c7b6c5df9-twwgp\" (UID: \"07814860-7a0a-48b8-996c-3472c44897f3\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-twwgp" Oct 11 05:10:44 crc kubenswrapper[4651]: I1011 05:10:44.633474 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/07814860-7a0a-48b8-996c-3472c44897f3-dns-svc\") pod \"dnsmasq-dns-5c7b6c5df9-twwgp\" (UID: \"07814860-7a0a-48b8-996c-3472c44897f3\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-twwgp" Oct 11 05:10:44 crc kubenswrapper[4651]: I1011 05:10:44.633741 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/07814860-7a0a-48b8-996c-3472c44897f3-ovsdbserver-nb\") pod \"dnsmasq-dns-5c7b6c5df9-twwgp\" (UID: \"07814860-7a0a-48b8-996c-3472c44897f3\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-twwgp" Oct 11 05:10:44 crc kubenswrapper[4651]: I1011 05:10:44.634035 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/07814860-7a0a-48b8-996c-3472c44897f3-dns-swift-storage-0\") pod \"dnsmasq-dns-5c7b6c5df9-twwgp\" (UID: \"07814860-7a0a-48b8-996c-3472c44897f3\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-twwgp" Oct 11 05:10:44 crc kubenswrapper[4651]: I1011 05:10:44.634179 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/07814860-7a0a-48b8-996c-3472c44897f3-ovsdbserver-sb\") pod \"dnsmasq-dns-5c7b6c5df9-twwgp\" (UID: \"07814860-7a0a-48b8-996c-3472c44897f3\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-twwgp" Oct 11 
05:10:44 crc kubenswrapper[4651]: I1011 05:10:44.652265 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jlrz\" (UniqueName: \"kubernetes.io/projected/07814860-7a0a-48b8-996c-3472c44897f3-kube-api-access-5jlrz\") pod \"dnsmasq-dns-5c7b6c5df9-twwgp\" (UID: \"07814860-7a0a-48b8-996c-3472c44897f3\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-twwgp" Oct 11 05:10:44 crc kubenswrapper[4651]: I1011 05:10:44.809283 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-twwgp" Oct 11 05:10:45 crc kubenswrapper[4651]: W1011 05:10:45.327627 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod07814860_7a0a_48b8_996c_3472c44897f3.slice/crio-1a23fde6865598f59760ca1e3ffd6785ced61c4b9c3a60b71def15fbf5dd927b WatchSource:0}: Error finding container 1a23fde6865598f59760ca1e3ffd6785ced61c4b9c3a60b71def15fbf5dd927b: Status 404 returned error can't find the container with id 1a23fde6865598f59760ca1e3ffd6785ced61c4b9c3a60b71def15fbf5dd927b Oct 11 05:10:45 crc kubenswrapper[4651]: I1011 05:10:45.329229 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-twwgp"] Oct 11 05:10:45 crc kubenswrapper[4651]: I1011 05:10:45.852459 4651 generic.go:334] "Generic (PLEG): container finished" podID="07814860-7a0a-48b8-996c-3472c44897f3" containerID="163f2d78f7594707b6a04858138c9ceeff8c916ecef9fd81cc8cd62a5e84ea50" exitCode=0 Oct 11 05:10:45 crc kubenswrapper[4651]: I1011 05:10:45.852554 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-twwgp" event={"ID":"07814860-7a0a-48b8-996c-3472c44897f3","Type":"ContainerDied","Data":"163f2d78f7594707b6a04858138c9ceeff8c916ecef9fd81cc8cd62a5e84ea50"} Oct 11 05:10:45 crc kubenswrapper[4651]: I1011 05:10:45.852898 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-twwgp" event={"ID":"07814860-7a0a-48b8-996c-3472c44897f3","Type":"ContainerStarted","Data":"1a23fde6865598f59760ca1e3ffd6785ced61c4b9c3a60b71def15fbf5dd927b"} Oct 11 05:10:46 crc kubenswrapper[4651]: I1011 05:10:46.309967 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 05:10:46 crc kubenswrapper[4651]: I1011 05:10:46.310021 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 05:10:46 crc kubenswrapper[4651]: I1011 05:10:46.310063 4651 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" Oct 11 05:10:46 crc kubenswrapper[4651]: I1011 05:10:46.482137 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 11 05:10:46 crc kubenswrapper[4651]: I1011 05:10:46.482419 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="074c84b3-f777-4c05-89dc-6e55dd72a2b9" containerName="ceilometer-central-agent" 
containerID="cri-o://219dd73522998cd618c6f9c9561803b95ff3a4b743770fdda950016bd0acd214" gracePeriod=30 Oct 11 05:10:46 crc kubenswrapper[4651]: I1011 05:10:46.482565 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="074c84b3-f777-4c05-89dc-6e55dd72a2b9" containerName="proxy-httpd" containerID="cri-o://7ded35d600cb77aff8b1b70082f2d923aa6ae14eff4a18e1b7fd0795b00c2f5f" gracePeriod=30 Oct 11 05:10:46 crc kubenswrapper[4651]: I1011 05:10:46.482631 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="074c84b3-f777-4c05-89dc-6e55dd72a2b9" containerName="ceilometer-notification-agent" containerID="cri-o://2a9b1cfa40f355f42e0681ef17c2223bc5651c0db8a36d51f5380df98223ad59" gracePeriod=30 Oct 11 05:10:46 crc kubenswrapper[4651]: I1011 05:10:46.482913 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="074c84b3-f777-4c05-89dc-6e55dd72a2b9" containerName="sg-core" containerID="cri-o://91ec52c4f6fd17b99366ac76e5cb220299dd84af0a25c0368bf36cd642f8d1bd" gracePeriod=30 Oct 11 05:10:46 crc kubenswrapper[4651]: I1011 05:10:46.851850 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 11 05:10:46 crc kubenswrapper[4651]: I1011 05:10:46.871005 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-twwgp" event={"ID":"07814860-7a0a-48b8-996c-3472c44897f3","Type":"ContainerStarted","Data":"a3f7364e84451290999c632504bf3c62cfde20321efb17b0b64c351964edbd97"} Oct 11 05:10:46 crc kubenswrapper[4651]: I1011 05:10:46.871155 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c7b6c5df9-twwgp" Oct 11 05:10:46 crc kubenswrapper[4651]: I1011 05:10:46.875395 4651 generic.go:334] "Generic (PLEG): container finished" podID="074c84b3-f777-4c05-89dc-6e55dd72a2b9" containerID="7ded35d600cb77aff8b1b70082f2d923aa6ae14eff4a18e1b7fd0795b00c2f5f" exitCode=0 Oct 11 05:10:46 crc kubenswrapper[4651]: I1011 05:10:46.875424 4651 generic.go:334] "Generic (PLEG): container finished" podID="074c84b3-f777-4c05-89dc-6e55dd72a2b9" containerID="91ec52c4f6fd17b99366ac76e5cb220299dd84af0a25c0368bf36cd642f8d1bd" exitCode=2 Oct 11 05:10:46 crc kubenswrapper[4651]: I1011 05:10:46.876087 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"074c84b3-f777-4c05-89dc-6e55dd72a2b9","Type":"ContainerDied","Data":"7ded35d600cb77aff8b1b70082f2d923aa6ae14eff4a18e1b7fd0795b00c2f5f"} Oct 11 05:10:46 crc kubenswrapper[4651]: I1011 05:10:46.876172 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"074c84b3-f777-4c05-89dc-6e55dd72a2b9","Type":"ContainerDied","Data":"91ec52c4f6fd17b99366ac76e5cb220299dd84af0a25c0368bf36cd642f8d1bd"} Oct 11 05:10:46 crc kubenswrapper[4651]: I1011 05:10:46.876115 4651 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"58a7a61e6423d5c4aad48bf422e788efe6a0897625015570766366bb08a19f53"} pod="openshift-machine-config-operator/machine-config-daemon-78jnv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 11 05:10:46 crc kubenswrapper[4651]: I1011 05:10:46.876352 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b408a1bd-6065-42a6-bf31-eebef8ad3452" 
containerName="nova-api-log" containerID="cri-o://912b659cdc950dcf0e19edd694bfe4dd9bf7f6191b9d307bf0b8d781734b7383" gracePeriod=30 Oct 11 05:10:46 crc kubenswrapper[4651]: I1011 05:10:46.876364 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" containerID="cri-o://58a7a61e6423d5c4aad48bf422e788efe6a0897625015570766366bb08a19f53" gracePeriod=600 Oct 11 05:10:46 crc kubenswrapper[4651]: I1011 05:10:46.876385 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b408a1bd-6065-42a6-bf31-eebef8ad3452" containerName="nova-api-api" containerID="cri-o://f0b28a78eb28df817bee1c747a80b9adc90d39337615f1fa89d1ed491b1d3293" gracePeriod=30 Oct 11 05:10:46 crc kubenswrapper[4651]: I1011 05:10:46.914371 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c7b6c5df9-twwgp" podStartSLOduration=2.9143465539999998 podStartE2EDuration="2.914346554s" podCreationTimestamp="2025-10-11 05:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:10:46.906234091 +0000 UTC m=+1167.802466927" watchObservedRunningTime="2025-10-11 05:10:46.914346554 +0000 UTC m=+1167.810579370" Oct 11 05:10:47 crc kubenswrapper[4651]: I1011 05:10:47.476451 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 11 05:10:47 crc kubenswrapper[4651]: I1011 05:10:47.899564 4651 generic.go:334] "Generic (PLEG): container finished" podID="074c84b3-f777-4c05-89dc-6e55dd72a2b9" containerID="219dd73522998cd618c6f9c9561803b95ff3a4b743770fdda950016bd0acd214" exitCode=0 Oct 11 05:10:47 crc kubenswrapper[4651]: I1011 05:10:47.900608 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"074c84b3-f777-4c05-89dc-6e55dd72a2b9","Type":"ContainerDied","Data":"219dd73522998cd618c6f9c9561803b95ff3a4b743770fdda950016bd0acd214"} Oct 11 05:10:47 crc kubenswrapper[4651]: I1011 05:10:47.903874 4651 generic.go:334] "Generic (PLEG): container finished" podID="b408a1bd-6065-42a6-bf31-eebef8ad3452" containerID="912b659cdc950dcf0e19edd694bfe4dd9bf7f6191b9d307bf0b8d781734b7383" exitCode=143 Oct 11 05:10:47 crc kubenswrapper[4651]: I1011 05:10:47.904013 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b408a1bd-6065-42a6-bf31-eebef8ad3452","Type":"ContainerDied","Data":"912b659cdc950dcf0e19edd694bfe4dd9bf7f6191b9d307bf0b8d781734b7383"} Oct 11 05:10:47 crc kubenswrapper[4651]: I1011 05:10:47.907776 4651 generic.go:334] "Generic (PLEG): container finished" podID="519a1ae1-e964-48b0-8b61-835146df28c1" containerID="58a7a61e6423d5c4aad48bf422e788efe6a0897625015570766366bb08a19f53" exitCode=0 Oct 11 05:10:47 crc kubenswrapper[4651]: I1011 05:10:47.907952 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" event={"ID":"519a1ae1-e964-48b0-8b61-835146df28c1","Type":"ContainerDied","Data":"58a7a61e6423d5c4aad48bf422e788efe6a0897625015570766366bb08a19f53"} Oct 11 05:10:47 crc kubenswrapper[4651]: I1011 05:10:47.908041 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" 
event={"ID":"519a1ae1-e964-48b0-8b61-835146df28c1","Type":"ContainerStarted","Data":"fdfc9e3f19e3d1ca9aaf28ab30da5e0edfd8b9e029feb471e05d551727bd0ad1"} Oct 11 05:10:47 crc kubenswrapper[4651]: I1011 05:10:47.908157 4651 scope.go:117] "RemoveContainer" containerID="bbcfc5308211a05ce73ce546a00d78ee49c4d35fa44427537e93a8a405fe9270" Oct 11 05:10:50 crc kubenswrapper[4651]: I1011 05:10:50.505427 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 11 05:10:50 crc kubenswrapper[4651]: I1011 05:10:50.556770 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rl727\" (UniqueName: \"kubernetes.io/projected/b408a1bd-6065-42a6-bf31-eebef8ad3452-kube-api-access-rl727\") pod \"b408a1bd-6065-42a6-bf31-eebef8ad3452\" (UID: \"b408a1bd-6065-42a6-bf31-eebef8ad3452\") " Oct 11 05:10:50 crc kubenswrapper[4651]: I1011 05:10:50.556895 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b408a1bd-6065-42a6-bf31-eebef8ad3452-combined-ca-bundle\") pod \"b408a1bd-6065-42a6-bf31-eebef8ad3452\" (UID: \"b408a1bd-6065-42a6-bf31-eebef8ad3452\") " Oct 11 05:10:50 crc kubenswrapper[4651]: I1011 05:10:50.556980 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b408a1bd-6065-42a6-bf31-eebef8ad3452-config-data\") pod \"b408a1bd-6065-42a6-bf31-eebef8ad3452\" (UID: \"b408a1bd-6065-42a6-bf31-eebef8ad3452\") " Oct 11 05:10:50 crc kubenswrapper[4651]: I1011 05:10:50.557004 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b408a1bd-6065-42a6-bf31-eebef8ad3452-logs\") pod \"b408a1bd-6065-42a6-bf31-eebef8ad3452\" (UID: \"b408a1bd-6065-42a6-bf31-eebef8ad3452\") " Oct 11 05:10:50 crc kubenswrapper[4651]: I1011 05:10:50.558019 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b408a1bd-6065-42a6-bf31-eebef8ad3452-logs" (OuterVolumeSpecName: "logs") pod "b408a1bd-6065-42a6-bf31-eebef8ad3452" (UID: "b408a1bd-6065-42a6-bf31-eebef8ad3452"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:10:50 crc kubenswrapper[4651]: I1011 05:10:50.574208 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b408a1bd-6065-42a6-bf31-eebef8ad3452-kube-api-access-rl727" (OuterVolumeSpecName: "kube-api-access-rl727") pod "b408a1bd-6065-42a6-bf31-eebef8ad3452" (UID: "b408a1bd-6065-42a6-bf31-eebef8ad3452"). InnerVolumeSpecName "kube-api-access-rl727". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:10:50 crc kubenswrapper[4651]: I1011 05:10:50.591079 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b408a1bd-6065-42a6-bf31-eebef8ad3452-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b408a1bd-6065-42a6-bf31-eebef8ad3452" (UID: "b408a1bd-6065-42a6-bf31-eebef8ad3452"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:10:50 crc kubenswrapper[4651]: I1011 05:10:50.611980 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b408a1bd-6065-42a6-bf31-eebef8ad3452-config-data" (OuterVolumeSpecName: "config-data") pod "b408a1bd-6065-42a6-bf31-eebef8ad3452" (UID: "b408a1bd-6065-42a6-bf31-eebef8ad3452"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:10:50 crc kubenswrapper[4651]: I1011 05:10:50.658425 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rl727\" (UniqueName: \"kubernetes.io/projected/b408a1bd-6065-42a6-bf31-eebef8ad3452-kube-api-access-rl727\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:50 crc kubenswrapper[4651]: I1011 05:10:50.658461 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b408a1bd-6065-42a6-bf31-eebef8ad3452-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:50 crc kubenswrapper[4651]: I1011 05:10:50.658471 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b408a1bd-6065-42a6-bf31-eebef8ad3452-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:50 crc kubenswrapper[4651]: I1011 05:10:50.658481 4651 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b408a1bd-6065-42a6-bf31-eebef8ad3452-logs\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:50 crc kubenswrapper[4651]: I1011 05:10:50.942745 4651 generic.go:334] "Generic (PLEG): container finished" podID="b408a1bd-6065-42a6-bf31-eebef8ad3452" containerID="f0b28a78eb28df817bee1c747a80b9adc90d39337615f1fa89d1ed491b1d3293" exitCode=0 Oct 11 05:10:50 crc kubenswrapper[4651]: I1011 05:10:50.942792 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b408a1bd-6065-42a6-bf31-eebef8ad3452","Type":"ContainerDied","Data":"f0b28a78eb28df817bee1c747a80b9adc90d39337615f1fa89d1ed491b1d3293"} Oct 11 05:10:50 crc kubenswrapper[4651]: I1011 05:10:50.942867 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b408a1bd-6065-42a6-bf31-eebef8ad3452","Type":"ContainerDied","Data":"dfbd19b00304cd514f643d0bda6a1d0eaa6ed405fd6a061696c5c8d0bd0aed2a"} Oct 11 05:10:50 crc kubenswrapper[4651]: I1011 05:10:50.942889 4651 scope.go:117] "RemoveContainer" containerID="f0b28a78eb28df817bee1c747a80b9adc90d39337615f1fa89d1ed491b1d3293" Oct 11 05:10:50 crc kubenswrapper[4651]: I1011 05:10:50.943034 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 11 05:10:50 crc kubenswrapper[4651]: I1011 05:10:50.973690 4651 scope.go:117] "RemoveContainer" containerID="912b659cdc950dcf0e19edd694bfe4dd9bf7f6191b9d307bf0b8d781734b7383" Oct 11 05:10:50 crc kubenswrapper[4651]: I1011 05:10:50.977173 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 11 05:10:50 crc kubenswrapper[4651]: I1011 05:10:50.986931 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.001019 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 11 05:10:51 crc kubenswrapper[4651]: E1011 05:10:51.001539 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b408a1bd-6065-42a6-bf31-eebef8ad3452" containerName="nova-api-log" Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.001562 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="b408a1bd-6065-42a6-bf31-eebef8ad3452" containerName="nova-api-log" Oct 11 05:10:51 crc kubenswrapper[4651]: E1011 05:10:51.001582 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b408a1bd-6065-42a6-bf31-eebef8ad3452" containerName="nova-api-api" Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.001591 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="b408a1bd-6065-42a6-bf31-eebef8ad3452" containerName="nova-api-api" Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.001799 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="b408a1bd-6065-42a6-bf31-eebef8ad3452" containerName="nova-api-api" Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.001852 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="b408a1bd-6065-42a6-bf31-eebef8ad3452" containerName="nova-api-log" Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.003046 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.008037 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.008536 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.008751 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.013797 4651 scope.go:117] "RemoveContainer" containerID="f0b28a78eb28df817bee1c747a80b9adc90d39337615f1fa89d1ed491b1d3293" Oct 11 05:10:51 crc kubenswrapper[4651]: E1011 05:10:51.014578 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f0b28a78eb28df817bee1c747a80b9adc90d39337615f1fa89d1ed491b1d3293\": container with ID starting with f0b28a78eb28df817bee1c747a80b9adc90d39337615f1fa89d1ed491b1d3293 not found: ID does not exist" containerID="f0b28a78eb28df817bee1c747a80b9adc90d39337615f1fa89d1ed491b1d3293" Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.014631 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0b28a78eb28df817bee1c747a80b9adc90d39337615f1fa89d1ed491b1d3293"} err="failed to get container status \"f0b28a78eb28df817bee1c747a80b9adc90d39337615f1fa89d1ed491b1d3293\": rpc error: code = NotFound desc = could not find container \"f0b28a78eb28df817bee1c747a80b9adc90d39337615f1fa89d1ed491b1d3293\": container with ID starting with f0b28a78eb28df817bee1c747a80b9adc90d39337615f1fa89d1ed491b1d3293 not found: ID does not exist" Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.014661 4651 scope.go:117] "RemoveContainer" containerID="912b659cdc950dcf0e19edd694bfe4dd9bf7f6191b9d307bf0b8d781734b7383" Oct 11 05:10:51 crc kubenswrapper[4651]: E1011 05:10:51.015080 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"912b659cdc950dcf0e19edd694bfe4dd9bf7f6191b9d307bf0b8d781734b7383\": container with ID starting with 912b659cdc950dcf0e19edd694bfe4dd9bf7f6191b9d307bf0b8d781734b7383 not found: ID does not exist" containerID="912b659cdc950dcf0e19edd694bfe4dd9bf7f6191b9d307bf0b8d781734b7383" Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.015112 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"912b659cdc950dcf0e19edd694bfe4dd9bf7f6191b9d307bf0b8d781734b7383"} err="failed to get container status \"912b659cdc950dcf0e19edd694bfe4dd9bf7f6191b9d307bf0b8d781734b7383\": rpc error: code = NotFound desc = could not find container \"912b659cdc950dcf0e19edd694bfe4dd9bf7f6191b9d307bf0b8d781734b7383\": container with ID starting with 912b659cdc950dcf0e19edd694bfe4dd9bf7f6191b9d307bf0b8d781734b7383 not found: ID does not exist" Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.024336 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.167882 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-logs\") pod \"nova-api-0\" (UID: \"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb\") " pod="openstack/nova-api-0" Oct 11 05:10:51 crc 
kubenswrapper[4651]: I1011 05:10:51.168271 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb\") " pod="openstack/nova-api-0" Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.168376 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-internal-tls-certs\") pod \"nova-api-0\" (UID: \"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb\") " pod="openstack/nova-api-0" Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.168471 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-public-tls-certs\") pod \"nova-api-0\" (UID: \"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb\") " pod="openstack/nova-api-0" Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.168509 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-config-data\") pod \"nova-api-0\" (UID: \"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb\") " pod="openstack/nova-api-0" Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.168541 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w622c\" (UniqueName: \"kubernetes.io/projected/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-kube-api-access-w622c\") pod \"nova-api-0\" (UID: \"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb\") " pod="openstack/nova-api-0" Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.269909 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-internal-tls-certs\") pod \"nova-api-0\" (UID: \"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb\") " pod="openstack/nova-api-0" Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.269973 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-public-tls-certs\") pod \"nova-api-0\" (UID: \"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb\") " pod="openstack/nova-api-0" Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.270001 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-config-data\") pod \"nova-api-0\" (UID: \"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb\") " pod="openstack/nova-api-0" Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.270046 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w622c\" (UniqueName: \"kubernetes.io/projected/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-kube-api-access-w622c\") pod \"nova-api-0\" (UID: \"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb\") " pod="openstack/nova-api-0" Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.270757 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-logs\") pod \"nova-api-0\" (UID: 
\"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb\") " pod="openstack/nova-api-0" Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.270791 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb\") " pod="openstack/nova-api-0" Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.271491 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-logs\") pod \"nova-api-0\" (UID: \"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb\") " pod="openstack/nova-api-0" Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.275148 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-config-data\") pod \"nova-api-0\" (UID: \"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb\") " pod="openstack/nova-api-0" Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.275607 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb\") " pod="openstack/nova-api-0" Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.277261 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-public-tls-certs\") pod \"nova-api-0\" (UID: \"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb\") " pod="openstack/nova-api-0" Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.277626 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-internal-tls-certs\") pod \"nova-api-0\" (UID: \"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb\") " pod="openstack/nova-api-0" Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.285243 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w622c\" (UniqueName: \"kubernetes.io/projected/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-kube-api-access-w622c\") pod \"nova-api-0\" (UID: \"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb\") " pod="openstack/nova-api-0" Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.331762 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.827733 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 11 05:10:51 crc kubenswrapper[4651]: W1011 05:10:51.832461 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod701bb8ef_4d1a_4c19_b102_f57ace7ce0eb.slice/crio-f9ef0e94a344285e19fc31eb28437f0f39b561d645d772ff3d978d3322380e35 WatchSource:0}: Error finding container f9ef0e94a344285e19fc31eb28437f0f39b561d645d772ff3d978d3322380e35: Status 404 returned error can't find the container with id f9ef0e94a344285e19fc31eb28437f0f39b561d645d772ff3d978d3322380e35 Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.891193 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b408a1bd-6065-42a6-bf31-eebef8ad3452" path="/var/lib/kubelet/pods/b408a1bd-6065-42a6-bf31-eebef8ad3452/volumes" Oct 11 05:10:51 crc kubenswrapper[4651]: I1011 05:10:51.959193 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb","Type":"ContainerStarted","Data":"f9ef0e94a344285e19fc31eb28437f0f39b561d645d772ff3d978d3322380e35"} Oct 11 05:10:52 crc kubenswrapper[4651]: I1011 05:10:52.477141 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Oct 11 05:10:52 crc kubenswrapper[4651]: I1011 05:10:52.502268 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Oct 11 05:10:52 crc kubenswrapper[4651]: I1011 05:10:52.976009 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb","Type":"ContainerStarted","Data":"3fc618a356631d18389c5a3acc001352a30b83769ac26c3cc298bbf5474e8c74"} Oct 11 05:10:52 crc kubenswrapper[4651]: I1011 05:10:52.976130 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb","Type":"ContainerStarted","Data":"8f62314e5537dcaf84c8da5ad9f9826ec12a97b6c6855430dee5491123e6517e"} Oct 11 05:10:52 crc kubenswrapper[4651]: I1011 05:10:52.995230 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Oct 11 05:10:53 crc kubenswrapper[4651]: I1011 05:10:53.004659 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.004636911 podStartE2EDuration="3.004636911s" podCreationTimestamp="2025-10-11 05:10:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:10:52.998193791 +0000 UTC m=+1173.894426667" watchObservedRunningTime="2025-10-11 05:10:53.004636911 +0000 UTC m=+1173.900869737" Oct 11 05:10:53 crc kubenswrapper[4651]: I1011 05:10:53.303087 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-9mvdn"] Oct 11 05:10:53 crc kubenswrapper[4651]: I1011 05:10:53.304223 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-9mvdn" Oct 11 05:10:53 crc kubenswrapper[4651]: I1011 05:10:53.306633 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Oct 11 05:10:53 crc kubenswrapper[4651]: I1011 05:10:53.307210 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Oct 11 05:10:53 crc kubenswrapper[4651]: I1011 05:10:53.327694 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-9mvdn"] Oct 11 05:10:53 crc kubenswrapper[4651]: I1011 05:10:53.414928 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ea769ce-f3c9-481f-9544-0d096ba2d0c8-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-9mvdn\" (UID: \"8ea769ce-f3c9-481f-9544-0d096ba2d0c8\") " pod="openstack/nova-cell1-cell-mapping-9mvdn" Oct 11 05:10:53 crc kubenswrapper[4651]: I1011 05:10:53.415263 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ea769ce-f3c9-481f-9544-0d096ba2d0c8-scripts\") pod \"nova-cell1-cell-mapping-9mvdn\" (UID: \"8ea769ce-f3c9-481f-9544-0d096ba2d0c8\") " pod="openstack/nova-cell1-cell-mapping-9mvdn" Oct 11 05:10:53 crc kubenswrapper[4651]: I1011 05:10:53.415292 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v897z\" (UniqueName: \"kubernetes.io/projected/8ea769ce-f3c9-481f-9544-0d096ba2d0c8-kube-api-access-v897z\") pod \"nova-cell1-cell-mapping-9mvdn\" (UID: \"8ea769ce-f3c9-481f-9544-0d096ba2d0c8\") " pod="openstack/nova-cell1-cell-mapping-9mvdn" Oct 11 05:10:53 crc kubenswrapper[4651]: I1011 05:10:53.415470 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ea769ce-f3c9-481f-9544-0d096ba2d0c8-config-data\") pod \"nova-cell1-cell-mapping-9mvdn\" (UID: \"8ea769ce-f3c9-481f-9544-0d096ba2d0c8\") " pod="openstack/nova-cell1-cell-mapping-9mvdn" Oct 11 05:10:53 crc kubenswrapper[4651]: I1011 05:10:53.516837 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ea769ce-f3c9-481f-9544-0d096ba2d0c8-config-data\") pod \"nova-cell1-cell-mapping-9mvdn\" (UID: \"8ea769ce-f3c9-481f-9544-0d096ba2d0c8\") " pod="openstack/nova-cell1-cell-mapping-9mvdn" Oct 11 05:10:53 crc kubenswrapper[4651]: I1011 05:10:53.516953 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ea769ce-f3c9-481f-9544-0d096ba2d0c8-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-9mvdn\" (UID: \"8ea769ce-f3c9-481f-9544-0d096ba2d0c8\") " pod="openstack/nova-cell1-cell-mapping-9mvdn" Oct 11 05:10:53 crc kubenswrapper[4651]: I1011 05:10:53.516993 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ea769ce-f3c9-481f-9544-0d096ba2d0c8-scripts\") pod \"nova-cell1-cell-mapping-9mvdn\" (UID: \"8ea769ce-f3c9-481f-9544-0d096ba2d0c8\") " pod="openstack/nova-cell1-cell-mapping-9mvdn" Oct 11 05:10:53 crc kubenswrapper[4651]: I1011 05:10:53.517011 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v897z\" (UniqueName: 
\"kubernetes.io/projected/8ea769ce-f3c9-481f-9544-0d096ba2d0c8-kube-api-access-v897z\") pod \"nova-cell1-cell-mapping-9mvdn\" (UID: \"8ea769ce-f3c9-481f-9544-0d096ba2d0c8\") " pod="openstack/nova-cell1-cell-mapping-9mvdn" Oct 11 05:10:53 crc kubenswrapper[4651]: I1011 05:10:53.522506 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ea769ce-f3c9-481f-9544-0d096ba2d0c8-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-9mvdn\" (UID: \"8ea769ce-f3c9-481f-9544-0d096ba2d0c8\") " pod="openstack/nova-cell1-cell-mapping-9mvdn" Oct 11 05:10:53 crc kubenswrapper[4651]: I1011 05:10:53.522503 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ea769ce-f3c9-481f-9544-0d096ba2d0c8-config-data\") pod \"nova-cell1-cell-mapping-9mvdn\" (UID: \"8ea769ce-f3c9-481f-9544-0d096ba2d0c8\") " pod="openstack/nova-cell1-cell-mapping-9mvdn" Oct 11 05:10:53 crc kubenswrapper[4651]: I1011 05:10:53.525131 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ea769ce-f3c9-481f-9544-0d096ba2d0c8-scripts\") pod \"nova-cell1-cell-mapping-9mvdn\" (UID: \"8ea769ce-f3c9-481f-9544-0d096ba2d0c8\") " pod="openstack/nova-cell1-cell-mapping-9mvdn" Oct 11 05:10:53 crc kubenswrapper[4651]: I1011 05:10:53.543367 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v897z\" (UniqueName: \"kubernetes.io/projected/8ea769ce-f3c9-481f-9544-0d096ba2d0c8-kube-api-access-v897z\") pod \"nova-cell1-cell-mapping-9mvdn\" (UID: \"8ea769ce-f3c9-481f-9544-0d096ba2d0c8\") " pod="openstack/nova-cell1-cell-mapping-9mvdn" Oct 11 05:10:53 crc kubenswrapper[4651]: I1011 05:10:53.624448 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-9mvdn" Oct 11 05:10:53 crc kubenswrapper[4651]: I1011 05:10:53.893099 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-9mvdn"] Oct 11 05:10:53 crc kubenswrapper[4651]: W1011 05:10:53.893589 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8ea769ce_f3c9_481f_9544_0d096ba2d0c8.slice/crio-6c73c666791eea40ca3f41c8130160ac65a99be622c608e8357d49980c57343f WatchSource:0}: Error finding container 6c73c666791eea40ca3f41c8130160ac65a99be622c608e8357d49980c57343f: Status 404 returned error can't find the container with id 6c73c666791eea40ca3f41c8130160ac65a99be622c608e8357d49980c57343f Oct 11 05:10:53 crc kubenswrapper[4651]: I1011 05:10:53.987800 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-9mvdn" event={"ID":"8ea769ce-f3c9-481f-9544-0d096ba2d0c8","Type":"ContainerStarted","Data":"6c73c666791eea40ca3f41c8130160ac65a99be622c608e8357d49980c57343f"} Oct 11 05:10:54 crc kubenswrapper[4651]: I1011 05:10:54.810971 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c7b6c5df9-twwgp" Oct 11 05:10:54 crc kubenswrapper[4651]: I1011 05:10:54.911244 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-hjq47"] Oct 11 05:10:54 crc kubenswrapper[4651]: I1011 05:10:54.911697 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-865f5d856f-hjq47" podUID="a4717124-cc6a-4961-9828-a89d0132ba8a" containerName="dnsmasq-dns" containerID="cri-o://322b84020b523200bb81cab8d12535239913338f3b2748d01164b6fcfcbdba29" gracePeriod=10 Oct 11 05:10:54 crc kubenswrapper[4651]: I1011 05:10:54.999188 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-9mvdn" event={"ID":"8ea769ce-f3c9-481f-9544-0d096ba2d0c8","Type":"ContainerStarted","Data":"2ff1626a641a3a945708d87d3446cb08598ac5bcf8b714ead332dd45584e7ea7"} Oct 11 05:10:55 crc kubenswrapper[4651]: I1011 05:10:55.020092 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-9mvdn" podStartSLOduration=2.020068022 podStartE2EDuration="2.020068022s" podCreationTimestamp="2025-10-11 05:10:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:10:55.014937194 +0000 UTC m=+1175.911170000" watchObservedRunningTime="2025-10-11 05:10:55.020068022 +0000 UTC m=+1175.916300818" Oct 11 05:10:55 crc kubenswrapper[4651]: I1011 05:10:55.433225 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-hjq47" Oct 11 05:10:55 crc kubenswrapper[4651]: I1011 05:10:55.569477 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a4717124-cc6a-4961-9828-a89d0132ba8a-dns-swift-storage-0\") pod \"a4717124-cc6a-4961-9828-a89d0132ba8a\" (UID: \"a4717124-cc6a-4961-9828-a89d0132ba8a\") " Oct 11 05:10:55 crc kubenswrapper[4651]: I1011 05:10:55.569552 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a4717124-cc6a-4961-9828-a89d0132ba8a-dns-svc\") pod \"a4717124-cc6a-4961-9828-a89d0132ba8a\" (UID: \"a4717124-cc6a-4961-9828-a89d0132ba8a\") " Oct 11 05:10:55 crc kubenswrapper[4651]: I1011 05:10:55.569661 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a4717124-cc6a-4961-9828-a89d0132ba8a-config\") pod \"a4717124-cc6a-4961-9828-a89d0132ba8a\" (UID: \"a4717124-cc6a-4961-9828-a89d0132ba8a\") " Oct 11 05:10:55 crc kubenswrapper[4651]: I1011 05:10:55.569707 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xplw\" (UniqueName: \"kubernetes.io/projected/a4717124-cc6a-4961-9828-a89d0132ba8a-kube-api-access-9xplw\") pod \"a4717124-cc6a-4961-9828-a89d0132ba8a\" (UID: \"a4717124-cc6a-4961-9828-a89d0132ba8a\") " Oct 11 05:10:55 crc kubenswrapper[4651]: I1011 05:10:55.569743 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a4717124-cc6a-4961-9828-a89d0132ba8a-ovsdbserver-sb\") pod \"a4717124-cc6a-4961-9828-a89d0132ba8a\" (UID: \"a4717124-cc6a-4961-9828-a89d0132ba8a\") " Oct 11 05:10:55 crc kubenswrapper[4651]: I1011 05:10:55.569777 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a4717124-cc6a-4961-9828-a89d0132ba8a-ovsdbserver-nb\") pod \"a4717124-cc6a-4961-9828-a89d0132ba8a\" (UID: \"a4717124-cc6a-4961-9828-a89d0132ba8a\") " Oct 11 05:10:55 crc kubenswrapper[4651]: I1011 05:10:55.598063 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4717124-cc6a-4961-9828-a89d0132ba8a-kube-api-access-9xplw" (OuterVolumeSpecName: "kube-api-access-9xplw") pod "a4717124-cc6a-4961-9828-a89d0132ba8a" (UID: "a4717124-cc6a-4961-9828-a89d0132ba8a"). InnerVolumeSpecName "kube-api-access-9xplw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:10:55 crc kubenswrapper[4651]: I1011 05:10:55.660541 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4717124-cc6a-4961-9828-a89d0132ba8a-config" (OuterVolumeSpecName: "config") pod "a4717124-cc6a-4961-9828-a89d0132ba8a" (UID: "a4717124-cc6a-4961-9828-a89d0132ba8a"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:10:55 crc kubenswrapper[4651]: I1011 05:10:55.672370 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a4717124-cc6a-4961-9828-a89d0132ba8a-config\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:55 crc kubenswrapper[4651]: I1011 05:10:55.672400 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xplw\" (UniqueName: \"kubernetes.io/projected/a4717124-cc6a-4961-9828-a89d0132ba8a-kube-api-access-9xplw\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:55 crc kubenswrapper[4651]: I1011 05:10:55.673521 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4717124-cc6a-4961-9828-a89d0132ba8a-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "a4717124-cc6a-4961-9828-a89d0132ba8a" (UID: "a4717124-cc6a-4961-9828-a89d0132ba8a"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:10:55 crc kubenswrapper[4651]: I1011 05:10:55.687259 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4717124-cc6a-4961-9828-a89d0132ba8a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a4717124-cc6a-4961-9828-a89d0132ba8a" (UID: "a4717124-cc6a-4961-9828-a89d0132ba8a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:10:55 crc kubenswrapper[4651]: I1011 05:10:55.697595 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4717124-cc6a-4961-9828-a89d0132ba8a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a4717124-cc6a-4961-9828-a89d0132ba8a" (UID: "a4717124-cc6a-4961-9828-a89d0132ba8a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:10:55 crc kubenswrapper[4651]: I1011 05:10:55.702106 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4717124-cc6a-4961-9828-a89d0132ba8a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a4717124-cc6a-4961-9828-a89d0132ba8a" (UID: "a4717124-cc6a-4961-9828-a89d0132ba8a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:10:55 crc kubenswrapper[4651]: I1011 05:10:55.774538 4651 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a4717124-cc6a-4961-9828-a89d0132ba8a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:55 crc kubenswrapper[4651]: I1011 05:10:55.774579 4651 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a4717124-cc6a-4961-9828-a89d0132ba8a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:55 crc kubenswrapper[4651]: I1011 05:10:55.774589 4651 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a4717124-cc6a-4961-9828-a89d0132ba8a-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:55 crc kubenswrapper[4651]: I1011 05:10:55.774600 4651 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a4717124-cc6a-4961-9828-a89d0132ba8a-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:55 crc kubenswrapper[4651]: I1011 05:10:55.791985 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 11 05:10:55 crc kubenswrapper[4651]: I1011 05:10:55.979156 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/074c84b3-f777-4c05-89dc-6e55dd72a2b9-scripts\") pod \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\" (UID: \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\") " Oct 11 05:10:55 crc kubenswrapper[4651]: I1011 05:10:55.979228 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/074c84b3-f777-4c05-89dc-6e55dd72a2b9-config-data\") pod \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\" (UID: \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\") " Oct 11 05:10:55 crc kubenswrapper[4651]: I1011 05:10:55.979300 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/074c84b3-f777-4c05-89dc-6e55dd72a2b9-combined-ca-bundle\") pod \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\" (UID: \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\") " Oct 11 05:10:55 crc kubenswrapper[4651]: I1011 05:10:55.979347 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9rc28\" (UniqueName: \"kubernetes.io/projected/074c84b3-f777-4c05-89dc-6e55dd72a2b9-kube-api-access-9rc28\") pod \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\" (UID: \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\") " Oct 11 05:10:55 crc kubenswrapper[4651]: I1011 05:10:55.979369 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/074c84b3-f777-4c05-89dc-6e55dd72a2b9-run-httpd\") pod \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\" (UID: \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\") " Oct 11 05:10:55 crc kubenswrapper[4651]: I1011 05:10:55.979389 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/074c84b3-f777-4c05-89dc-6e55dd72a2b9-sg-core-conf-yaml\") pod \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\" (UID: \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\") " Oct 11 05:10:55 crc kubenswrapper[4651]: I1011 05:10:55.979427 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/074c84b3-f777-4c05-89dc-6e55dd72a2b9-log-httpd\") pod \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\" (UID: \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\") " Oct 11 05:10:55 crc kubenswrapper[4651]: I1011 05:10:55.979510 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/074c84b3-f777-4c05-89dc-6e55dd72a2b9-ceilometer-tls-certs\") pod \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\" (UID: \"074c84b3-f777-4c05-89dc-6e55dd72a2b9\") " Oct 11 05:10:55 crc kubenswrapper[4651]: I1011 05:10:55.979843 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/074c84b3-f777-4c05-89dc-6e55dd72a2b9-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "074c84b3-f777-4c05-89dc-6e55dd72a2b9" (UID: "074c84b3-f777-4c05-89dc-6e55dd72a2b9"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:10:55 crc kubenswrapper[4651]: I1011 05:10:55.980146 4651 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/074c84b3-f777-4c05-89dc-6e55dd72a2b9-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:55 crc kubenswrapper[4651]: I1011 05:10:55.980231 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/074c84b3-f777-4c05-89dc-6e55dd72a2b9-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "074c84b3-f777-4c05-89dc-6e55dd72a2b9" (UID: "074c84b3-f777-4c05-89dc-6e55dd72a2b9"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:10:55 crc kubenswrapper[4651]: I1011 05:10:55.986748 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/074c84b3-f777-4c05-89dc-6e55dd72a2b9-kube-api-access-9rc28" (OuterVolumeSpecName: "kube-api-access-9rc28") pod "074c84b3-f777-4c05-89dc-6e55dd72a2b9" (UID: "074c84b3-f777-4c05-89dc-6e55dd72a2b9"). InnerVolumeSpecName "kube-api-access-9rc28". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:10:55 crc kubenswrapper[4651]: I1011 05:10:55.989041 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/074c84b3-f777-4c05-89dc-6e55dd72a2b9-scripts" (OuterVolumeSpecName: "scripts") pod "074c84b3-f777-4c05-89dc-6e55dd72a2b9" (UID: "074c84b3-f777-4c05-89dc-6e55dd72a2b9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.027755 4651 generic.go:334] "Generic (PLEG): container finished" podID="a4717124-cc6a-4961-9828-a89d0132ba8a" containerID="322b84020b523200bb81cab8d12535239913338f3b2748d01164b6fcfcbdba29" exitCode=0 Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.027899 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-hjq47" event={"ID":"a4717124-cc6a-4961-9828-a89d0132ba8a","Type":"ContainerDied","Data":"322b84020b523200bb81cab8d12535239913338f3b2748d01164b6fcfcbdba29"} Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.027926 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-hjq47" event={"ID":"a4717124-cc6a-4961-9828-a89d0132ba8a","Type":"ContainerDied","Data":"b210b9ff59e2bcb0220d8678523ea723919d74c57a05326f3a2eb1c9a3f5dc49"} Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.027941 4651 scope.go:117] "RemoveContainer" containerID="322b84020b523200bb81cab8d12535239913338f3b2748d01164b6fcfcbdba29" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.028064 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-hjq47" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.033306 4651 generic.go:334] "Generic (PLEG): container finished" podID="074c84b3-f777-4c05-89dc-6e55dd72a2b9" containerID="2a9b1cfa40f355f42e0681ef17c2223bc5651c0db8a36d51f5380df98223ad59" exitCode=0 Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.033438 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"074c84b3-f777-4c05-89dc-6e55dd72a2b9","Type":"ContainerDied","Data":"2a9b1cfa40f355f42e0681ef17c2223bc5651c0db8a36d51f5380df98223ad59"} Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.033476 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"074c84b3-f777-4c05-89dc-6e55dd72a2b9","Type":"ContainerDied","Data":"7abf68480556773970568c5b6907eed71971e6f6e918e03e108359e238b648c6"} Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.033587 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.046832 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/074c84b3-f777-4c05-89dc-6e55dd72a2b9-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "074c84b3-f777-4c05-89dc-6e55dd72a2b9" (UID: "074c84b3-f777-4c05-89dc-6e55dd72a2b9"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.062286 4651 scope.go:117] "RemoveContainer" containerID="354dcd1cf4794b626e50f7f06cca8c34ffa19f2d2cf103ac857e4649bddf2064" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.063499 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/074c84b3-f777-4c05-89dc-6e55dd72a2b9-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "074c84b3-f777-4c05-89dc-6e55dd72a2b9" (UID: "074c84b3-f777-4c05-89dc-6e55dd72a2b9"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.078674 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-hjq47"] Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.082471 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9rc28\" (UniqueName: \"kubernetes.io/projected/074c84b3-f777-4c05-89dc-6e55dd72a2b9-kube-api-access-9rc28\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.082495 4651 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/074c84b3-f777-4c05-89dc-6e55dd72a2b9-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.082504 4651 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/074c84b3-f777-4c05-89dc-6e55dd72a2b9-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.082514 4651 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/074c84b3-f777-4c05-89dc-6e55dd72a2b9-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.082522 4651 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/074c84b3-f777-4c05-89dc-6e55dd72a2b9-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.086901 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-hjq47"] Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.094070 4651 scope.go:117] "RemoveContainer" containerID="322b84020b523200bb81cab8d12535239913338f3b2748d01164b6fcfcbdba29" Oct 11 05:10:56 crc kubenswrapper[4651]: E1011 05:10:56.094560 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"322b84020b523200bb81cab8d12535239913338f3b2748d01164b6fcfcbdba29\": container with ID starting with 322b84020b523200bb81cab8d12535239913338f3b2748d01164b6fcfcbdba29 not found: ID does not exist" containerID="322b84020b523200bb81cab8d12535239913338f3b2748d01164b6fcfcbdba29" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.094603 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"322b84020b523200bb81cab8d12535239913338f3b2748d01164b6fcfcbdba29"} err="failed to get container status \"322b84020b523200bb81cab8d12535239913338f3b2748d01164b6fcfcbdba29\": rpc error: code = NotFound desc = could not find container \"322b84020b523200bb81cab8d12535239913338f3b2748d01164b6fcfcbdba29\": container with ID starting with 322b84020b523200bb81cab8d12535239913338f3b2748d01164b6fcfcbdba29 not found: ID does not exist" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.094632 4651 scope.go:117] "RemoveContainer" containerID="354dcd1cf4794b626e50f7f06cca8c34ffa19f2d2cf103ac857e4649bddf2064" Oct 11 05:10:56 crc kubenswrapper[4651]: E1011 05:10:56.095145 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"354dcd1cf4794b626e50f7f06cca8c34ffa19f2d2cf103ac857e4649bddf2064\": container with ID starting with 354dcd1cf4794b626e50f7f06cca8c34ffa19f2d2cf103ac857e4649bddf2064 not found: ID does not exist" 
containerID="354dcd1cf4794b626e50f7f06cca8c34ffa19f2d2cf103ac857e4649bddf2064" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.095178 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"354dcd1cf4794b626e50f7f06cca8c34ffa19f2d2cf103ac857e4649bddf2064"} err="failed to get container status \"354dcd1cf4794b626e50f7f06cca8c34ffa19f2d2cf103ac857e4649bddf2064\": rpc error: code = NotFound desc = could not find container \"354dcd1cf4794b626e50f7f06cca8c34ffa19f2d2cf103ac857e4649bddf2064\": container with ID starting with 354dcd1cf4794b626e50f7f06cca8c34ffa19f2d2cf103ac857e4649bddf2064 not found: ID does not exist" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.095199 4651 scope.go:117] "RemoveContainer" containerID="7ded35d600cb77aff8b1b70082f2d923aa6ae14eff4a18e1b7fd0795b00c2f5f" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.109030 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/074c84b3-f777-4c05-89dc-6e55dd72a2b9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "074c84b3-f777-4c05-89dc-6e55dd72a2b9" (UID: "074c84b3-f777-4c05-89dc-6e55dd72a2b9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.120676 4651 scope.go:117] "RemoveContainer" containerID="91ec52c4f6fd17b99366ac76e5cb220299dd84af0a25c0368bf36cd642f8d1bd" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.129438 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/074c84b3-f777-4c05-89dc-6e55dd72a2b9-config-data" (OuterVolumeSpecName: "config-data") pod "074c84b3-f777-4c05-89dc-6e55dd72a2b9" (UID: "074c84b3-f777-4c05-89dc-6e55dd72a2b9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.151964 4651 scope.go:117] "RemoveContainer" containerID="2a9b1cfa40f355f42e0681ef17c2223bc5651c0db8a36d51f5380df98223ad59" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.175888 4651 scope.go:117] "RemoveContainer" containerID="219dd73522998cd618c6f9c9561803b95ff3a4b743770fdda950016bd0acd214" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.184192 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/074c84b3-f777-4c05-89dc-6e55dd72a2b9-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.184219 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/074c84b3-f777-4c05-89dc-6e55dd72a2b9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.200338 4651 scope.go:117] "RemoveContainer" containerID="7ded35d600cb77aff8b1b70082f2d923aa6ae14eff4a18e1b7fd0795b00c2f5f" Oct 11 05:10:56 crc kubenswrapper[4651]: E1011 05:10:56.201210 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ded35d600cb77aff8b1b70082f2d923aa6ae14eff4a18e1b7fd0795b00c2f5f\": container with ID starting with 7ded35d600cb77aff8b1b70082f2d923aa6ae14eff4a18e1b7fd0795b00c2f5f not found: ID does not exist" containerID="7ded35d600cb77aff8b1b70082f2d923aa6ae14eff4a18e1b7fd0795b00c2f5f" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.201240 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ded35d600cb77aff8b1b70082f2d923aa6ae14eff4a18e1b7fd0795b00c2f5f"} err="failed to get container status \"7ded35d600cb77aff8b1b70082f2d923aa6ae14eff4a18e1b7fd0795b00c2f5f\": rpc error: code = NotFound desc = could not find container \"7ded35d600cb77aff8b1b70082f2d923aa6ae14eff4a18e1b7fd0795b00c2f5f\": container with ID starting with 7ded35d600cb77aff8b1b70082f2d923aa6ae14eff4a18e1b7fd0795b00c2f5f not found: ID does not exist" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.201262 4651 scope.go:117] "RemoveContainer" containerID="91ec52c4f6fd17b99366ac76e5cb220299dd84af0a25c0368bf36cd642f8d1bd" Oct 11 05:10:56 crc kubenswrapper[4651]: E1011 05:10:56.201723 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"91ec52c4f6fd17b99366ac76e5cb220299dd84af0a25c0368bf36cd642f8d1bd\": container with ID starting with 91ec52c4f6fd17b99366ac76e5cb220299dd84af0a25c0368bf36cd642f8d1bd not found: ID does not exist" containerID="91ec52c4f6fd17b99366ac76e5cb220299dd84af0a25c0368bf36cd642f8d1bd" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.201753 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91ec52c4f6fd17b99366ac76e5cb220299dd84af0a25c0368bf36cd642f8d1bd"} err="failed to get container status \"91ec52c4f6fd17b99366ac76e5cb220299dd84af0a25c0368bf36cd642f8d1bd\": rpc error: code = NotFound desc = could not find container \"91ec52c4f6fd17b99366ac76e5cb220299dd84af0a25c0368bf36cd642f8d1bd\": container with ID starting with 91ec52c4f6fd17b99366ac76e5cb220299dd84af0a25c0368bf36cd642f8d1bd not found: ID does not exist" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.201770 4651 scope.go:117] "RemoveContainer" 
containerID="2a9b1cfa40f355f42e0681ef17c2223bc5651c0db8a36d51f5380df98223ad59" Oct 11 05:10:56 crc kubenswrapper[4651]: E1011 05:10:56.202062 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a9b1cfa40f355f42e0681ef17c2223bc5651c0db8a36d51f5380df98223ad59\": container with ID starting with 2a9b1cfa40f355f42e0681ef17c2223bc5651c0db8a36d51f5380df98223ad59 not found: ID does not exist" containerID="2a9b1cfa40f355f42e0681ef17c2223bc5651c0db8a36d51f5380df98223ad59" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.202081 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a9b1cfa40f355f42e0681ef17c2223bc5651c0db8a36d51f5380df98223ad59"} err="failed to get container status \"2a9b1cfa40f355f42e0681ef17c2223bc5651c0db8a36d51f5380df98223ad59\": rpc error: code = NotFound desc = could not find container \"2a9b1cfa40f355f42e0681ef17c2223bc5651c0db8a36d51f5380df98223ad59\": container with ID starting with 2a9b1cfa40f355f42e0681ef17c2223bc5651c0db8a36d51f5380df98223ad59 not found: ID does not exist" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.202094 4651 scope.go:117] "RemoveContainer" containerID="219dd73522998cd618c6f9c9561803b95ff3a4b743770fdda950016bd0acd214" Oct 11 05:10:56 crc kubenswrapper[4651]: E1011 05:10:56.202381 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"219dd73522998cd618c6f9c9561803b95ff3a4b743770fdda950016bd0acd214\": container with ID starting with 219dd73522998cd618c6f9c9561803b95ff3a4b743770fdda950016bd0acd214 not found: ID does not exist" containerID="219dd73522998cd618c6f9c9561803b95ff3a4b743770fdda950016bd0acd214" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.202403 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"219dd73522998cd618c6f9c9561803b95ff3a4b743770fdda950016bd0acd214"} err="failed to get container status \"219dd73522998cd618c6f9c9561803b95ff3a4b743770fdda950016bd0acd214\": rpc error: code = NotFound desc = could not find container \"219dd73522998cd618c6f9c9561803b95ff3a4b743770fdda950016bd0acd214\": container with ID starting with 219dd73522998cd618c6f9c9561803b95ff3a4b743770fdda950016bd0acd214 not found: ID does not exist" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.394475 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.404888 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.416133 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 11 05:10:56 crc kubenswrapper[4651]: E1011 05:10:56.416491 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4717124-cc6a-4961-9828-a89d0132ba8a" containerName="init" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.416507 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4717124-cc6a-4961-9828-a89d0132ba8a" containerName="init" Oct 11 05:10:56 crc kubenswrapper[4651]: E1011 05:10:56.416534 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="074c84b3-f777-4c05-89dc-6e55dd72a2b9" containerName="ceilometer-notification-agent" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.416540 4651 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="074c84b3-f777-4c05-89dc-6e55dd72a2b9" containerName="ceilometer-notification-agent" Oct 11 05:10:56 crc kubenswrapper[4651]: E1011 05:10:56.416557 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="074c84b3-f777-4c05-89dc-6e55dd72a2b9" containerName="sg-core" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.416563 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="074c84b3-f777-4c05-89dc-6e55dd72a2b9" containerName="sg-core" Oct 11 05:10:56 crc kubenswrapper[4651]: E1011 05:10:56.416575 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="074c84b3-f777-4c05-89dc-6e55dd72a2b9" containerName="ceilometer-central-agent" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.416580 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="074c84b3-f777-4c05-89dc-6e55dd72a2b9" containerName="ceilometer-central-agent" Oct 11 05:10:56 crc kubenswrapper[4651]: E1011 05:10:56.416590 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4717124-cc6a-4961-9828-a89d0132ba8a" containerName="dnsmasq-dns" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.416596 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4717124-cc6a-4961-9828-a89d0132ba8a" containerName="dnsmasq-dns" Oct 11 05:10:56 crc kubenswrapper[4651]: E1011 05:10:56.416616 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="074c84b3-f777-4c05-89dc-6e55dd72a2b9" containerName="proxy-httpd" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.416622 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="074c84b3-f777-4c05-89dc-6e55dd72a2b9" containerName="proxy-httpd" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.416777 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="074c84b3-f777-4c05-89dc-6e55dd72a2b9" containerName="sg-core" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.416791 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4717124-cc6a-4961-9828-a89d0132ba8a" containerName="dnsmasq-dns" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.416800 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="074c84b3-f777-4c05-89dc-6e55dd72a2b9" containerName="ceilometer-central-agent" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.416813 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="074c84b3-f777-4c05-89dc-6e55dd72a2b9" containerName="ceilometer-notification-agent" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.416844 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="074c84b3-f777-4c05-89dc-6e55dd72a2b9" containerName="proxy-httpd" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.418642 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.424037 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.424220 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.424335 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.424899 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.490418 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/123a74ae-c8a8-467a-b358-b13ac2cff461-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"123a74ae-c8a8-467a-b358-b13ac2cff461\") " pod="openstack/ceilometer-0" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.490560 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j24bq\" (UniqueName: \"kubernetes.io/projected/123a74ae-c8a8-467a-b358-b13ac2cff461-kube-api-access-j24bq\") pod \"ceilometer-0\" (UID: \"123a74ae-c8a8-467a-b358-b13ac2cff461\") " pod="openstack/ceilometer-0" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.490600 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/123a74ae-c8a8-467a-b358-b13ac2cff461-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"123a74ae-c8a8-467a-b358-b13ac2cff461\") " pod="openstack/ceilometer-0" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.490653 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/123a74ae-c8a8-467a-b358-b13ac2cff461-config-data\") pod \"ceilometer-0\" (UID: \"123a74ae-c8a8-467a-b358-b13ac2cff461\") " pod="openstack/ceilometer-0" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.490679 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/123a74ae-c8a8-467a-b358-b13ac2cff461-scripts\") pod \"ceilometer-0\" (UID: \"123a74ae-c8a8-467a-b358-b13ac2cff461\") " pod="openstack/ceilometer-0" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.490708 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/123a74ae-c8a8-467a-b358-b13ac2cff461-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"123a74ae-c8a8-467a-b358-b13ac2cff461\") " pod="openstack/ceilometer-0" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.490742 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/123a74ae-c8a8-467a-b358-b13ac2cff461-log-httpd\") pod \"ceilometer-0\" (UID: \"123a74ae-c8a8-467a-b358-b13ac2cff461\") " pod="openstack/ceilometer-0" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.490804 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/123a74ae-c8a8-467a-b358-b13ac2cff461-run-httpd\") pod \"ceilometer-0\" (UID: \"123a74ae-c8a8-467a-b358-b13ac2cff461\") " pod="openstack/ceilometer-0" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.597191 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/123a74ae-c8a8-467a-b358-b13ac2cff461-run-httpd\") pod \"ceilometer-0\" (UID: \"123a74ae-c8a8-467a-b358-b13ac2cff461\") " pod="openstack/ceilometer-0" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.597385 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/123a74ae-c8a8-467a-b358-b13ac2cff461-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"123a74ae-c8a8-467a-b358-b13ac2cff461\") " pod="openstack/ceilometer-0" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.597619 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/123a74ae-c8a8-467a-b358-b13ac2cff461-run-httpd\") pod \"ceilometer-0\" (UID: \"123a74ae-c8a8-467a-b358-b13ac2cff461\") " pod="openstack/ceilometer-0" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.597680 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j24bq\" (UniqueName: \"kubernetes.io/projected/123a74ae-c8a8-467a-b358-b13ac2cff461-kube-api-access-j24bq\") pod \"ceilometer-0\" (UID: \"123a74ae-c8a8-467a-b358-b13ac2cff461\") " pod="openstack/ceilometer-0" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.597716 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/123a74ae-c8a8-467a-b358-b13ac2cff461-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"123a74ae-c8a8-467a-b358-b13ac2cff461\") " pod="openstack/ceilometer-0" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.597747 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/123a74ae-c8a8-467a-b358-b13ac2cff461-config-data\") pod \"ceilometer-0\" (UID: \"123a74ae-c8a8-467a-b358-b13ac2cff461\") " pod="openstack/ceilometer-0" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.597763 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/123a74ae-c8a8-467a-b358-b13ac2cff461-scripts\") pod \"ceilometer-0\" (UID: \"123a74ae-c8a8-467a-b358-b13ac2cff461\") " pod="openstack/ceilometer-0" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.597788 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/123a74ae-c8a8-467a-b358-b13ac2cff461-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"123a74ae-c8a8-467a-b358-b13ac2cff461\") " pod="openstack/ceilometer-0" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.597831 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/123a74ae-c8a8-467a-b358-b13ac2cff461-log-httpd\") pod \"ceilometer-0\" (UID: \"123a74ae-c8a8-467a-b358-b13ac2cff461\") " pod="openstack/ceilometer-0" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.598107 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/123a74ae-c8a8-467a-b358-b13ac2cff461-log-httpd\") pod \"ceilometer-0\" (UID: \"123a74ae-c8a8-467a-b358-b13ac2cff461\") " pod="openstack/ceilometer-0" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.604135 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/123a74ae-c8a8-467a-b358-b13ac2cff461-config-data\") pod \"ceilometer-0\" (UID: \"123a74ae-c8a8-467a-b358-b13ac2cff461\") " pod="openstack/ceilometer-0" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.604344 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/123a74ae-c8a8-467a-b358-b13ac2cff461-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"123a74ae-c8a8-467a-b358-b13ac2cff461\") " pod="openstack/ceilometer-0" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.607016 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/123a74ae-c8a8-467a-b358-b13ac2cff461-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"123a74ae-c8a8-467a-b358-b13ac2cff461\") " pod="openstack/ceilometer-0" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.611610 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/123a74ae-c8a8-467a-b358-b13ac2cff461-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"123a74ae-c8a8-467a-b358-b13ac2cff461\") " pod="openstack/ceilometer-0" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.612978 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/123a74ae-c8a8-467a-b358-b13ac2cff461-scripts\") pod \"ceilometer-0\" (UID: \"123a74ae-c8a8-467a-b358-b13ac2cff461\") " pod="openstack/ceilometer-0" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.619487 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j24bq\" (UniqueName: \"kubernetes.io/projected/123a74ae-c8a8-467a-b358-b13ac2cff461-kube-api-access-j24bq\") pod \"ceilometer-0\" (UID: \"123a74ae-c8a8-467a-b358-b13ac2cff461\") " pod="openstack/ceilometer-0" Oct 11 05:10:56 crc kubenswrapper[4651]: I1011 05:10:56.738154 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 11 05:10:57 crc kubenswrapper[4651]: I1011 05:10:57.227075 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 11 05:10:57 crc kubenswrapper[4651]: I1011 05:10:57.886923 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="074c84b3-f777-4c05-89dc-6e55dd72a2b9" path="/var/lib/kubelet/pods/074c84b3-f777-4c05-89dc-6e55dd72a2b9/volumes" Oct 11 05:10:57 crc kubenswrapper[4651]: I1011 05:10:57.888246 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4717124-cc6a-4961-9828-a89d0132ba8a" path="/var/lib/kubelet/pods/a4717124-cc6a-4961-9828-a89d0132ba8a/volumes" Oct 11 05:10:58 crc kubenswrapper[4651]: I1011 05:10:58.051890 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"123a74ae-c8a8-467a-b358-b13ac2cff461","Type":"ContainerStarted","Data":"11b81083e3d60faa2016150f194e8c0d064ea24e4d26ff294ed49aa99f3edd18"} Oct 11 05:10:58 crc kubenswrapper[4651]: I1011 05:10:58.051926 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"123a74ae-c8a8-467a-b358-b13ac2cff461","Type":"ContainerStarted","Data":"8d8897554b14cc36e668bdc320b253be6ec746411e69aa840067b65fd1d4bcf8"} Oct 11 05:10:59 crc kubenswrapper[4651]: I1011 05:10:59.060337 4651 generic.go:334] "Generic (PLEG): container finished" podID="8ea769ce-f3c9-481f-9544-0d096ba2d0c8" containerID="2ff1626a641a3a945708d87d3446cb08598ac5bcf8b714ead332dd45584e7ea7" exitCode=0 Oct 11 05:10:59 crc kubenswrapper[4651]: I1011 05:10:59.060391 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-9mvdn" event={"ID":"8ea769ce-f3c9-481f-9544-0d096ba2d0c8","Type":"ContainerDied","Data":"2ff1626a641a3a945708d87d3446cb08598ac5bcf8b714ead332dd45584e7ea7"} Oct 11 05:10:59 crc kubenswrapper[4651]: I1011 05:10:59.062081 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"123a74ae-c8a8-467a-b358-b13ac2cff461","Type":"ContainerStarted","Data":"c57a4ed0cbaa5352969014e87298b6d35161d8d3bf80c25f9a89c177661928bf"} Oct 11 05:11:00 crc kubenswrapper[4651]: I1011 05:11:00.072550 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"123a74ae-c8a8-467a-b358-b13ac2cff461","Type":"ContainerStarted","Data":"bde0526a2a17596b890880c4ae96363bfed478b6e0a56db5f02dfb03fee19aca"} Oct 11 05:11:00 crc kubenswrapper[4651]: I1011 05:11:00.461037 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-9mvdn" Oct 11 05:11:00 crc kubenswrapper[4651]: I1011 05:11:00.624791 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ea769ce-f3c9-481f-9544-0d096ba2d0c8-scripts\") pod \"8ea769ce-f3c9-481f-9544-0d096ba2d0c8\" (UID: \"8ea769ce-f3c9-481f-9544-0d096ba2d0c8\") " Oct 11 05:11:00 crc kubenswrapper[4651]: I1011 05:11:00.625123 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ea769ce-f3c9-481f-9544-0d096ba2d0c8-config-data\") pod \"8ea769ce-f3c9-481f-9544-0d096ba2d0c8\" (UID: \"8ea769ce-f3c9-481f-9544-0d096ba2d0c8\") " Oct 11 05:11:00 crc kubenswrapper[4651]: I1011 05:11:00.625272 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v897z\" (UniqueName: \"kubernetes.io/projected/8ea769ce-f3c9-481f-9544-0d096ba2d0c8-kube-api-access-v897z\") pod \"8ea769ce-f3c9-481f-9544-0d096ba2d0c8\" (UID: \"8ea769ce-f3c9-481f-9544-0d096ba2d0c8\") " Oct 11 05:11:00 crc kubenswrapper[4651]: I1011 05:11:00.625449 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ea769ce-f3c9-481f-9544-0d096ba2d0c8-combined-ca-bundle\") pod \"8ea769ce-f3c9-481f-9544-0d096ba2d0c8\" (UID: \"8ea769ce-f3c9-481f-9544-0d096ba2d0c8\") " Oct 11 05:11:00 crc kubenswrapper[4651]: I1011 05:11:00.631764 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ea769ce-f3c9-481f-9544-0d096ba2d0c8-kube-api-access-v897z" (OuterVolumeSpecName: "kube-api-access-v897z") pod "8ea769ce-f3c9-481f-9544-0d096ba2d0c8" (UID: "8ea769ce-f3c9-481f-9544-0d096ba2d0c8"). InnerVolumeSpecName "kube-api-access-v897z". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:11:00 crc kubenswrapper[4651]: I1011 05:11:00.633471 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ea769ce-f3c9-481f-9544-0d096ba2d0c8-scripts" (OuterVolumeSpecName: "scripts") pod "8ea769ce-f3c9-481f-9544-0d096ba2d0c8" (UID: "8ea769ce-f3c9-481f-9544-0d096ba2d0c8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:11:00 crc kubenswrapper[4651]: I1011 05:11:00.685063 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ea769ce-f3c9-481f-9544-0d096ba2d0c8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8ea769ce-f3c9-481f-9544-0d096ba2d0c8" (UID: "8ea769ce-f3c9-481f-9544-0d096ba2d0c8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:11:00 crc kubenswrapper[4651]: I1011 05:11:00.697118 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ea769ce-f3c9-481f-9544-0d096ba2d0c8-config-data" (OuterVolumeSpecName: "config-data") pod "8ea769ce-f3c9-481f-9544-0d096ba2d0c8" (UID: "8ea769ce-f3c9-481f-9544-0d096ba2d0c8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:11:00 crc kubenswrapper[4651]: I1011 05:11:00.727312 4651 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ea769ce-f3c9-481f-9544-0d096ba2d0c8-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:00 crc kubenswrapper[4651]: I1011 05:11:00.727345 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ea769ce-f3c9-481f-9544-0d096ba2d0c8-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:00 crc kubenswrapper[4651]: I1011 05:11:00.727357 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v897z\" (UniqueName: \"kubernetes.io/projected/8ea769ce-f3c9-481f-9544-0d096ba2d0c8-kube-api-access-v897z\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:00 crc kubenswrapper[4651]: I1011 05:11:00.727369 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ea769ce-f3c9-481f-9544-0d096ba2d0c8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:01 crc kubenswrapper[4651]: I1011 05:11:01.082258 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-9mvdn" event={"ID":"8ea769ce-f3c9-481f-9544-0d096ba2d0c8","Type":"ContainerDied","Data":"6c73c666791eea40ca3f41c8130160ac65a99be622c608e8357d49980c57343f"} Oct 11 05:11:01 crc kubenswrapper[4651]: I1011 05:11:01.082553 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6c73c666791eea40ca3f41c8130160ac65a99be622c608e8357d49980c57343f" Oct 11 05:11:01 crc kubenswrapper[4651]: I1011 05:11:01.082298 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-9mvdn" Oct 11 05:11:01 crc kubenswrapper[4651]: I1011 05:11:01.274939 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 11 05:11:01 crc kubenswrapper[4651]: I1011 05:11:01.275205 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="701bb8ef-4d1a-4c19-b102-f57ace7ce0eb" containerName="nova-api-log" containerID="cri-o://8f62314e5537dcaf84c8da5ad9f9826ec12a97b6c6855430dee5491123e6517e" gracePeriod=30 Oct 11 05:11:01 crc kubenswrapper[4651]: I1011 05:11:01.275349 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="701bb8ef-4d1a-4c19-b102-f57ace7ce0eb" containerName="nova-api-api" containerID="cri-o://3fc618a356631d18389c5a3acc001352a30b83769ac26c3cc298bbf5474e8c74" gracePeriod=30 Oct 11 05:11:01 crc kubenswrapper[4651]: I1011 05:11:01.285878 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 11 05:11:01 crc kubenswrapper[4651]: I1011 05:11:01.286183 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="df04f80e-387c-40c8-a293-97cf7d3f4c79" containerName="nova-scheduler-scheduler" containerID="cri-o://531973352bb051d3570df2038989aef5dc001e82e263fce7d939afcff1e3e6c7" gracePeriod=30 Oct 11 05:11:01 crc kubenswrapper[4651]: I1011 05:11:01.300535 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 11 05:11:01 crc kubenswrapper[4651]: I1011 05:11:01.307204 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="29a549a6-e4d5-41c6-8042-d6a88f17fa94" 
containerName="nova-metadata-log" containerID="cri-o://069ff3e520fa9810a7879f49294ab184f3ea5b81ae4dfe4ac1e021c70b5be95d" gracePeriod=30 Oct 11 05:11:01 crc kubenswrapper[4651]: I1011 05:11:01.307409 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="29a549a6-e4d5-41c6-8042-d6a88f17fa94" containerName="nova-metadata-metadata" containerID="cri-o://d261e942118d66979e4108db8d86a64b96d0bb437acdfd1c0130c3a62af2635d" gracePeriod=30 Oct 11 05:11:01 crc kubenswrapper[4651]: I1011 05:11:01.777121 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 11 05:11:01 crc kubenswrapper[4651]: I1011 05:11:01.960036 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-logs\") pod \"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb\" (UID: \"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb\") " Oct 11 05:11:01 crc kubenswrapper[4651]: I1011 05:11:01.960545 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-public-tls-certs\") pod \"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb\" (UID: \"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb\") " Oct 11 05:11:01 crc kubenswrapper[4651]: I1011 05:11:01.960583 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-combined-ca-bundle\") pod \"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb\" (UID: \"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb\") " Oct 11 05:11:01 crc kubenswrapper[4651]: I1011 05:11:01.960623 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w622c\" (UniqueName: \"kubernetes.io/projected/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-kube-api-access-w622c\") pod \"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb\" (UID: \"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb\") " Oct 11 05:11:01 crc kubenswrapper[4651]: I1011 05:11:01.960679 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-internal-tls-certs\") pod \"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb\" (UID: \"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb\") " Oct 11 05:11:01 crc kubenswrapper[4651]: I1011 05:11:01.960724 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-config-data\") pod \"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb\" (UID: \"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb\") " Oct 11 05:11:01 crc kubenswrapper[4651]: I1011 05:11:01.961359 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-logs" (OuterVolumeSpecName: "logs") pod "701bb8ef-4d1a-4c19-b102-f57ace7ce0eb" (UID: "701bb8ef-4d1a-4c19-b102-f57ace7ce0eb"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:11:01 crc kubenswrapper[4651]: I1011 05:11:01.965786 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-kube-api-access-w622c" (OuterVolumeSpecName: "kube-api-access-w622c") pod "701bb8ef-4d1a-4c19-b102-f57ace7ce0eb" (UID: "701bb8ef-4d1a-4c19-b102-f57ace7ce0eb"). InnerVolumeSpecName "kube-api-access-w622c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:11:01 crc kubenswrapper[4651]: I1011 05:11:01.994933 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "701bb8ef-4d1a-4c19-b102-f57ace7ce0eb" (UID: "701bb8ef-4d1a-4c19-b102-f57ace7ce0eb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.010297 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-config-data" (OuterVolumeSpecName: "config-data") pod "701bb8ef-4d1a-4c19-b102-f57ace7ce0eb" (UID: "701bb8ef-4d1a-4c19-b102-f57ace7ce0eb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.022655 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "701bb8ef-4d1a-4c19-b102-f57ace7ce0eb" (UID: "701bb8ef-4d1a-4c19-b102-f57ace7ce0eb"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.029477 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "701bb8ef-4d1a-4c19-b102-f57ace7ce0eb" (UID: "701bb8ef-4d1a-4c19-b102-f57ace7ce0eb"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.062561 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.062595 4651 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-logs\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.062646 4651 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.062660 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.062697 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w622c\" (UniqueName: \"kubernetes.io/projected/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-kube-api-access-w622c\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.062710 4651 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.097002 4651 generic.go:334] "Generic (PLEG): container finished" podID="29a549a6-e4d5-41c6-8042-d6a88f17fa94" containerID="069ff3e520fa9810a7879f49294ab184f3ea5b81ae4dfe4ac1e021c70b5be95d" exitCode=143 Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.097302 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"29a549a6-e4d5-41c6-8042-d6a88f17fa94","Type":"ContainerDied","Data":"069ff3e520fa9810a7879f49294ab184f3ea5b81ae4dfe4ac1e021c70b5be95d"} Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.099249 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"123a74ae-c8a8-467a-b358-b13ac2cff461","Type":"ContainerStarted","Data":"8516ea7c684c1315554daaad4578b918d174962d14d015c526f6b37df275d35d"} Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.099723 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.104050 4651 generic.go:334] "Generic (PLEG): container finished" podID="701bb8ef-4d1a-4c19-b102-f57ace7ce0eb" containerID="3fc618a356631d18389c5a3acc001352a30b83769ac26c3cc298bbf5474e8c74" exitCode=0 Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.104085 4651 generic.go:334] "Generic (PLEG): container finished" podID="701bb8ef-4d1a-4c19-b102-f57ace7ce0eb" containerID="8f62314e5537dcaf84c8da5ad9f9826ec12a97b6c6855430dee5491123e6517e" exitCode=143 Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.104126 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb","Type":"ContainerDied","Data":"3fc618a356631d18389c5a3acc001352a30b83769ac26c3cc298bbf5474e8c74"} Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.104197 4651 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb","Type":"ContainerDied","Data":"8f62314e5537dcaf84c8da5ad9f9826ec12a97b6c6855430dee5491123e6517e"} Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.104207 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"701bb8ef-4d1a-4c19-b102-f57ace7ce0eb","Type":"ContainerDied","Data":"f9ef0e94a344285e19fc31eb28437f0f39b561d645d772ff3d978d3322380e35"} Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.104235 4651 scope.go:117] "RemoveContainer" containerID="3fc618a356631d18389c5a3acc001352a30b83769ac26c3cc298bbf5474e8c74" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.104612 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.153761 4651 scope.go:117] "RemoveContainer" containerID="8f62314e5537dcaf84c8da5ad9f9826ec12a97b6c6855430dee5491123e6517e" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.164239 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.193334381 podStartE2EDuration="6.164221506s" podCreationTimestamp="2025-10-11 05:10:56 +0000 UTC" firstStartedPulling="2025-10-11 05:10:57.224879044 +0000 UTC m=+1178.121111850" lastFinishedPulling="2025-10-11 05:11:01.195766179 +0000 UTC m=+1182.091998975" observedRunningTime="2025-10-11 05:11:02.130264779 +0000 UTC m=+1183.026497575" watchObservedRunningTime="2025-10-11 05:11:02.164221506 +0000 UTC m=+1183.060454302" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.177287 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.184243 4651 scope.go:117] "RemoveContainer" containerID="3fc618a356631d18389c5a3acc001352a30b83769ac26c3cc298bbf5474e8c74" Oct 11 05:11:02 crc kubenswrapper[4651]: E1011 05:11:02.184698 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3fc618a356631d18389c5a3acc001352a30b83769ac26c3cc298bbf5474e8c74\": container with ID starting with 3fc618a356631d18389c5a3acc001352a30b83769ac26c3cc298bbf5474e8c74 not found: ID does not exist" containerID="3fc618a356631d18389c5a3acc001352a30b83769ac26c3cc298bbf5474e8c74" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.184761 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3fc618a356631d18389c5a3acc001352a30b83769ac26c3cc298bbf5474e8c74"} err="failed to get container status \"3fc618a356631d18389c5a3acc001352a30b83769ac26c3cc298bbf5474e8c74\": rpc error: code = NotFound desc = could not find container \"3fc618a356631d18389c5a3acc001352a30b83769ac26c3cc298bbf5474e8c74\": container with ID starting with 3fc618a356631d18389c5a3acc001352a30b83769ac26c3cc298bbf5474e8c74 not found: ID does not exist" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.184788 4651 scope.go:117] "RemoveContainer" containerID="8f62314e5537dcaf84c8da5ad9f9826ec12a97b6c6855430dee5491123e6517e" Oct 11 05:11:02 crc kubenswrapper[4651]: E1011 05:11:02.185101 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f62314e5537dcaf84c8da5ad9f9826ec12a97b6c6855430dee5491123e6517e\": container with ID starting with 
8f62314e5537dcaf84c8da5ad9f9826ec12a97b6c6855430dee5491123e6517e not found: ID does not exist" containerID="8f62314e5537dcaf84c8da5ad9f9826ec12a97b6c6855430dee5491123e6517e" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.185157 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f62314e5537dcaf84c8da5ad9f9826ec12a97b6c6855430dee5491123e6517e"} err="failed to get container status \"8f62314e5537dcaf84c8da5ad9f9826ec12a97b6c6855430dee5491123e6517e\": rpc error: code = NotFound desc = could not find container \"8f62314e5537dcaf84c8da5ad9f9826ec12a97b6c6855430dee5491123e6517e\": container with ID starting with 8f62314e5537dcaf84c8da5ad9f9826ec12a97b6c6855430dee5491123e6517e not found: ID does not exist" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.185184 4651 scope.go:117] "RemoveContainer" containerID="3fc618a356631d18389c5a3acc001352a30b83769ac26c3cc298bbf5474e8c74" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.185707 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3fc618a356631d18389c5a3acc001352a30b83769ac26c3cc298bbf5474e8c74"} err="failed to get container status \"3fc618a356631d18389c5a3acc001352a30b83769ac26c3cc298bbf5474e8c74\": rpc error: code = NotFound desc = could not find container \"3fc618a356631d18389c5a3acc001352a30b83769ac26c3cc298bbf5474e8c74\": container with ID starting with 3fc618a356631d18389c5a3acc001352a30b83769ac26c3cc298bbf5474e8c74 not found: ID does not exist" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.185730 4651 scope.go:117] "RemoveContainer" containerID="8f62314e5537dcaf84c8da5ad9f9826ec12a97b6c6855430dee5491123e6517e" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.186666 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.188114 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f62314e5537dcaf84c8da5ad9f9826ec12a97b6c6855430dee5491123e6517e"} err="failed to get container status \"8f62314e5537dcaf84c8da5ad9f9826ec12a97b6c6855430dee5491123e6517e\": rpc error: code = NotFound desc = could not find container \"8f62314e5537dcaf84c8da5ad9f9826ec12a97b6c6855430dee5491123e6517e\": container with ID starting with 8f62314e5537dcaf84c8da5ad9f9826ec12a97b6c6855430dee5491123e6517e not found: ID does not exist" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.194346 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 11 05:11:02 crc kubenswrapper[4651]: E1011 05:11:02.194922 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="701bb8ef-4d1a-4c19-b102-f57ace7ce0eb" containerName="nova-api-api" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.194938 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="701bb8ef-4d1a-4c19-b102-f57ace7ce0eb" containerName="nova-api-api" Oct 11 05:11:02 crc kubenswrapper[4651]: E1011 05:11:02.194977 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="701bb8ef-4d1a-4c19-b102-f57ace7ce0eb" containerName="nova-api-log" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.194984 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="701bb8ef-4d1a-4c19-b102-f57ace7ce0eb" containerName="nova-api-log" Oct 11 05:11:02 crc kubenswrapper[4651]: E1011 05:11:02.195012 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ea769ce-f3c9-481f-9544-0d096ba2d0c8" 
containerName="nova-manage" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.195018 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ea769ce-f3c9-481f-9544-0d096ba2d0c8" containerName="nova-manage" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.195197 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="701bb8ef-4d1a-4c19-b102-f57ace7ce0eb" containerName="nova-api-log" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.195209 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="701bb8ef-4d1a-4c19-b102-f57ace7ce0eb" containerName="nova-api-api" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.195227 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ea769ce-f3c9-481f-9544-0d096ba2d0c8" containerName="nova-manage" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.196543 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.199670 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.199865 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.199989 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.204023 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.369118 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76-internal-tls-certs\") pod \"nova-api-0\" (UID: \"4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76\") " pod="openstack/nova-api-0" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.369205 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76-logs\") pod \"nova-api-0\" (UID: \"4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76\") " pod="openstack/nova-api-0" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.369258 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76-public-tls-certs\") pod \"nova-api-0\" (UID: \"4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76\") " pod="openstack/nova-api-0" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.369282 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p79wt\" (UniqueName: \"kubernetes.io/projected/4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76-kube-api-access-p79wt\") pod \"nova-api-0\" (UID: \"4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76\") " pod="openstack/nova-api-0" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.369304 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76\") " pod="openstack/nova-api-0" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.369332 
4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76-config-data\") pod \"nova-api-0\" (UID: \"4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76\") " pod="openstack/nova-api-0" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.471267 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76-internal-tls-certs\") pod \"nova-api-0\" (UID: \"4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76\") " pod="openstack/nova-api-0" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.471814 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76-logs\") pod \"nova-api-0\" (UID: \"4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76\") " pod="openstack/nova-api-0" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.472141 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76-public-tls-certs\") pod \"nova-api-0\" (UID: \"4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76\") " pod="openstack/nova-api-0" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.472251 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76-logs\") pod \"nova-api-0\" (UID: \"4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76\") " pod="openstack/nova-api-0" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.472588 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p79wt\" (UniqueName: \"kubernetes.io/projected/4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76-kube-api-access-p79wt\") pod \"nova-api-0\" (UID: \"4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76\") " pod="openstack/nova-api-0" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.473342 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76\") " pod="openstack/nova-api-0" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.473527 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76-config-data\") pod \"nova-api-0\" (UID: \"4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76\") " pod="openstack/nova-api-0" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.475356 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76-public-tls-certs\") pod \"nova-api-0\" (UID: \"4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76\") " pod="openstack/nova-api-0" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.477613 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76-config-data\") pod \"nova-api-0\" (UID: \"4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76\") " pod="openstack/nova-api-0" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.478244 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76-internal-tls-certs\") pod \"nova-api-0\" (UID: \"4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76\") " pod="openstack/nova-api-0" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.478999 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76\") " pod="openstack/nova-api-0" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.490300 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p79wt\" (UniqueName: \"kubernetes.io/projected/4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76-kube-api-access-p79wt\") pod \"nova-api-0\" (UID: \"4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76\") " pod="openstack/nova-api-0" Oct 11 05:11:02 crc kubenswrapper[4651]: I1011 05:11:02.519659 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.064041 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.096499 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df04f80e-387c-40c8-a293-97cf7d3f4c79-config-data\") pod \"df04f80e-387c-40c8-a293-97cf7d3f4c79\" (UID: \"df04f80e-387c-40c8-a293-97cf7d3f4c79\") " Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.096598 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pnjfz\" (UniqueName: \"kubernetes.io/projected/df04f80e-387c-40c8-a293-97cf7d3f4c79-kube-api-access-pnjfz\") pod \"df04f80e-387c-40c8-a293-97cf7d3f4c79\" (UID: \"df04f80e-387c-40c8-a293-97cf7d3f4c79\") " Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.096693 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df04f80e-387c-40c8-a293-97cf7d3f4c79-combined-ca-bundle\") pod \"df04f80e-387c-40c8-a293-97cf7d3f4c79\" (UID: \"df04f80e-387c-40c8-a293-97cf7d3f4c79\") " Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.104045 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df04f80e-387c-40c8-a293-97cf7d3f4c79-kube-api-access-pnjfz" (OuterVolumeSpecName: "kube-api-access-pnjfz") pod "df04f80e-387c-40c8-a293-97cf7d3f4c79" (UID: "df04f80e-387c-40c8-a293-97cf7d3f4c79"). InnerVolumeSpecName "kube-api-access-pnjfz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.114632 4651 generic.go:334] "Generic (PLEG): container finished" podID="df04f80e-387c-40c8-a293-97cf7d3f4c79" containerID="531973352bb051d3570df2038989aef5dc001e82e263fce7d939afcff1e3e6c7" exitCode=0 Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.114691 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"df04f80e-387c-40c8-a293-97cf7d3f4c79","Type":"ContainerDied","Data":"531973352bb051d3570df2038989aef5dc001e82e263fce7d939afcff1e3e6c7"} Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.114723 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"df04f80e-387c-40c8-a293-97cf7d3f4c79","Type":"ContainerDied","Data":"bda0e32b9cbeb3457310dc04dd68d4b4a2ac2678c2a966a1e3eb31f2ab85a05f"} Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.114739 4651 scope.go:117] "RemoveContainer" containerID="531973352bb051d3570df2038989aef5dc001e82e263fce7d939afcff1e3e6c7" Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.114804 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.133040 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df04f80e-387c-40c8-a293-97cf7d3f4c79-config-data" (OuterVolumeSpecName: "config-data") pod "df04f80e-387c-40c8-a293-97cf7d3f4c79" (UID: "df04f80e-387c-40c8-a293-97cf7d3f4c79"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.136490 4651 scope.go:117] "RemoveContainer" containerID="531973352bb051d3570df2038989aef5dc001e82e263fce7d939afcff1e3e6c7" Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.136864 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df04f80e-387c-40c8-a293-97cf7d3f4c79-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "df04f80e-387c-40c8-a293-97cf7d3f4c79" (UID: "df04f80e-387c-40c8-a293-97cf7d3f4c79"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:11:03 crc kubenswrapper[4651]: E1011 05:11:03.138298 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"531973352bb051d3570df2038989aef5dc001e82e263fce7d939afcff1e3e6c7\": container with ID starting with 531973352bb051d3570df2038989aef5dc001e82e263fce7d939afcff1e3e6c7 not found: ID does not exist" containerID="531973352bb051d3570df2038989aef5dc001e82e263fce7d939afcff1e3e6c7" Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.138354 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"531973352bb051d3570df2038989aef5dc001e82e263fce7d939afcff1e3e6c7"} err="failed to get container status \"531973352bb051d3570df2038989aef5dc001e82e263fce7d939afcff1e3e6c7\": rpc error: code = NotFound desc = could not find container \"531973352bb051d3570df2038989aef5dc001e82e263fce7d939afcff1e3e6c7\": container with ID starting with 531973352bb051d3570df2038989aef5dc001e82e263fce7d939afcff1e3e6c7 not found: ID does not exist" Oct 11 05:11:03 crc kubenswrapper[4651]: W1011 05:11:03.175905 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4791f0e9_7fb3_4b2e_8e67_52e1eb0fde76.slice/crio-0aeef46d967b2cb516f39c8efae45c030b2749a589bcb83e083a2ce12be1b0d7 WatchSource:0}: Error finding container 0aeef46d967b2cb516f39c8efae45c030b2749a589bcb83e083a2ce12be1b0d7: Status 404 returned error can't find the container with id 0aeef46d967b2cb516f39c8efae45c030b2749a589bcb83e083a2ce12be1b0d7 Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.178012 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.200517 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df04f80e-387c-40c8-a293-97cf7d3f4c79-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.200540 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pnjfz\" (UniqueName: \"kubernetes.io/projected/df04f80e-387c-40c8-a293-97cf7d3f4c79-kube-api-access-pnjfz\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.200550 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df04f80e-387c-40c8-a293-97cf7d3f4c79-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.509540 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.519954 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.532017 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 11 05:11:03 crc kubenswrapper[4651]: E1011 05:11:03.532409 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df04f80e-387c-40c8-a293-97cf7d3f4c79" containerName="nova-scheduler-scheduler" Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.532427 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="df04f80e-387c-40c8-a293-97cf7d3f4c79" containerName="nova-scheduler-scheduler" Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.532616 4651 
memory_manager.go:354] "RemoveStaleState removing state" podUID="df04f80e-387c-40c8-a293-97cf7d3f4c79" containerName="nova-scheduler-scheduler" Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.533218 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.537470 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.545021 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.608509 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14edb1d7-f645-4c9e-8363-0342719b2457-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"14edb1d7-f645-4c9e-8363-0342719b2457\") " pod="openstack/nova-scheduler-0" Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.608855 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14edb1d7-f645-4c9e-8363-0342719b2457-config-data\") pod \"nova-scheduler-0\" (UID: \"14edb1d7-f645-4c9e-8363-0342719b2457\") " pod="openstack/nova-scheduler-0" Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.608892 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xtx6\" (UniqueName: \"kubernetes.io/projected/14edb1d7-f645-4c9e-8363-0342719b2457-kube-api-access-6xtx6\") pod \"nova-scheduler-0\" (UID: \"14edb1d7-f645-4c9e-8363-0342719b2457\") " pod="openstack/nova-scheduler-0" Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.710577 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14edb1d7-f645-4c9e-8363-0342719b2457-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"14edb1d7-f645-4c9e-8363-0342719b2457\") " pod="openstack/nova-scheduler-0" Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.710690 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14edb1d7-f645-4c9e-8363-0342719b2457-config-data\") pod \"nova-scheduler-0\" (UID: \"14edb1d7-f645-4c9e-8363-0342719b2457\") " pod="openstack/nova-scheduler-0" Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.710720 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xtx6\" (UniqueName: \"kubernetes.io/projected/14edb1d7-f645-4c9e-8363-0342719b2457-kube-api-access-6xtx6\") pod \"nova-scheduler-0\" (UID: \"14edb1d7-f645-4c9e-8363-0342719b2457\") " pod="openstack/nova-scheduler-0" Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.719739 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14edb1d7-f645-4c9e-8363-0342719b2457-config-data\") pod \"nova-scheduler-0\" (UID: \"14edb1d7-f645-4c9e-8363-0342719b2457\") " pod="openstack/nova-scheduler-0" Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.719830 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14edb1d7-f645-4c9e-8363-0342719b2457-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: 
\"14edb1d7-f645-4c9e-8363-0342719b2457\") " pod="openstack/nova-scheduler-0" Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.726305 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xtx6\" (UniqueName: \"kubernetes.io/projected/14edb1d7-f645-4c9e-8363-0342719b2457-kube-api-access-6xtx6\") pod \"nova-scheduler-0\" (UID: \"14edb1d7-f645-4c9e-8363-0342719b2457\") " pod="openstack/nova-scheduler-0" Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.859707 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.884699 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="701bb8ef-4d1a-4c19-b102-f57ace7ce0eb" path="/var/lib/kubelet/pods/701bb8ef-4d1a-4c19-b102-f57ace7ce0eb/volumes" Oct 11 05:11:03 crc kubenswrapper[4651]: I1011 05:11:03.885336 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df04f80e-387c-40c8-a293-97cf7d3f4c79" path="/var/lib/kubelet/pods/df04f80e-387c-40c8-a293-97cf7d3f4c79/volumes" Oct 11 05:11:04 crc kubenswrapper[4651]: I1011 05:11:04.132475 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76","Type":"ContainerStarted","Data":"df1a4a86f5f21cb1880397e1b54bd4509ff73a1a4a206df70a090b7300180ff0"} Oct 11 05:11:04 crc kubenswrapper[4651]: I1011 05:11:04.132522 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76","Type":"ContainerStarted","Data":"f0b2bcceb3b0f70af199d7e17f2b909a84125f1205cae64eab00c52fccbdb8ab"} Oct 11 05:11:04 crc kubenswrapper[4651]: I1011 05:11:04.132533 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76","Type":"ContainerStarted","Data":"0aeef46d967b2cb516f39c8efae45c030b2749a589bcb83e083a2ce12be1b0d7"} Oct 11 05:11:04 crc kubenswrapper[4651]: I1011 05:11:04.158946 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.158927539 podStartE2EDuration="2.158927539s" podCreationTimestamp="2025-10-11 05:11:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:11:04.149108014 +0000 UTC m=+1185.045340820" watchObservedRunningTime="2025-10-11 05:11:04.158927539 +0000 UTC m=+1185.055160335" Oct 11 05:11:04 crc kubenswrapper[4651]: I1011 05:11:04.328555 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 11 05:11:04 crc kubenswrapper[4651]: W1011 05:11:04.331881 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod14edb1d7_f645_4c9e_8363_0342719b2457.slice/crio-06b695ae33e7cf10cf223189d8612570aeb06a1c0cd9e4961fd7833901f15884 WatchSource:0}: Error finding container 06b695ae33e7cf10cf223189d8612570aeb06a1c0cd9e4961fd7833901f15884: Status 404 returned error can't find the container with id 06b695ae33e7cf10cf223189d8612570aeb06a1c0cd9e4961fd7833901f15884 Oct 11 05:11:04 crc kubenswrapper[4651]: I1011 05:11:04.448984 4651 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="29a549a6-e4d5-41c6-8042-d6a88f17fa94" containerName="nova-metadata-metadata" probeResult="failure" output="Get 
\"https://10.217.0.198:8775/\": read tcp 10.217.0.2:46000->10.217.0.198:8775: read: connection reset by peer" Oct 11 05:11:04 crc kubenswrapper[4651]: I1011 05:11:04.449736 4651 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="29a549a6-e4d5-41c6-8042-d6a88f17fa94" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.198:8775/\": read tcp 10.217.0.2:46002->10.217.0.198:8775: read: connection reset by peer" Oct 11 05:11:04 crc kubenswrapper[4651]: I1011 05:11:04.927656 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 11 05:11:04 crc kubenswrapper[4651]: I1011 05:11:04.951113 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29a549a6-e4d5-41c6-8042-d6a88f17fa94-combined-ca-bundle\") pod \"29a549a6-e4d5-41c6-8042-d6a88f17fa94\" (UID: \"29a549a6-e4d5-41c6-8042-d6a88f17fa94\") " Oct 11 05:11:04 crc kubenswrapper[4651]: I1011 05:11:04.951346 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/29a549a6-e4d5-41c6-8042-d6a88f17fa94-nova-metadata-tls-certs\") pod \"29a549a6-e4d5-41c6-8042-d6a88f17fa94\" (UID: \"29a549a6-e4d5-41c6-8042-d6a88f17fa94\") " Oct 11 05:11:04 crc kubenswrapper[4651]: I1011 05:11:04.951414 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29a549a6-e4d5-41c6-8042-d6a88f17fa94-logs\") pod \"29a549a6-e4d5-41c6-8042-d6a88f17fa94\" (UID: \"29a549a6-e4d5-41c6-8042-d6a88f17fa94\") " Oct 11 05:11:04 crc kubenswrapper[4651]: I1011 05:11:04.951473 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hhjm9\" (UniqueName: \"kubernetes.io/projected/29a549a6-e4d5-41c6-8042-d6a88f17fa94-kube-api-access-hhjm9\") pod \"29a549a6-e4d5-41c6-8042-d6a88f17fa94\" (UID: \"29a549a6-e4d5-41c6-8042-d6a88f17fa94\") " Oct 11 05:11:04 crc kubenswrapper[4651]: I1011 05:11:04.951497 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29a549a6-e4d5-41c6-8042-d6a88f17fa94-config-data\") pod \"29a549a6-e4d5-41c6-8042-d6a88f17fa94\" (UID: \"29a549a6-e4d5-41c6-8042-d6a88f17fa94\") " Oct 11 05:11:04 crc kubenswrapper[4651]: I1011 05:11:04.963444 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29a549a6-e4d5-41c6-8042-d6a88f17fa94-logs" (OuterVolumeSpecName: "logs") pod "29a549a6-e4d5-41c6-8042-d6a88f17fa94" (UID: "29a549a6-e4d5-41c6-8042-d6a88f17fa94"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:11:04 crc kubenswrapper[4651]: I1011 05:11:04.987592 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29a549a6-e4d5-41c6-8042-d6a88f17fa94-kube-api-access-hhjm9" (OuterVolumeSpecName: "kube-api-access-hhjm9") pod "29a549a6-e4d5-41c6-8042-d6a88f17fa94" (UID: "29a549a6-e4d5-41c6-8042-d6a88f17fa94"). InnerVolumeSpecName "kube-api-access-hhjm9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.011581 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29a549a6-e4d5-41c6-8042-d6a88f17fa94-config-data" (OuterVolumeSpecName: "config-data") pod "29a549a6-e4d5-41c6-8042-d6a88f17fa94" (UID: "29a549a6-e4d5-41c6-8042-d6a88f17fa94"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.036532 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29a549a6-e4d5-41c6-8042-d6a88f17fa94-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "29a549a6-e4d5-41c6-8042-d6a88f17fa94" (UID: "29a549a6-e4d5-41c6-8042-d6a88f17fa94"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.054250 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hhjm9\" (UniqueName: \"kubernetes.io/projected/29a549a6-e4d5-41c6-8042-d6a88f17fa94-kube-api-access-hhjm9\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.054295 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29a549a6-e4d5-41c6-8042-d6a88f17fa94-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.054309 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29a549a6-e4d5-41c6-8042-d6a88f17fa94-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.054320 4651 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29a549a6-e4d5-41c6-8042-d6a88f17fa94-logs\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.062415 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29a549a6-e4d5-41c6-8042-d6a88f17fa94-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "29a549a6-e4d5-41c6-8042-d6a88f17fa94" (UID: "29a549a6-e4d5-41c6-8042-d6a88f17fa94"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.143993 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"14edb1d7-f645-4c9e-8363-0342719b2457","Type":"ContainerStarted","Data":"2e12d98b8dfceaad0d40ab08ca8b8fc762cba949790a9c9c5e381b3262672517"} Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.144062 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"14edb1d7-f645-4c9e-8363-0342719b2457","Type":"ContainerStarted","Data":"06b695ae33e7cf10cf223189d8612570aeb06a1c0cd9e4961fd7833901f15884"} Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.147299 4651 generic.go:334] "Generic (PLEG): container finished" podID="29a549a6-e4d5-41c6-8042-d6a88f17fa94" containerID="d261e942118d66979e4108db8d86a64b96d0bb437acdfd1c0130c3a62af2635d" exitCode=0 Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.147858 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.149110 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"29a549a6-e4d5-41c6-8042-d6a88f17fa94","Type":"ContainerDied","Data":"d261e942118d66979e4108db8d86a64b96d0bb437acdfd1c0130c3a62af2635d"} Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.149143 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"29a549a6-e4d5-41c6-8042-d6a88f17fa94","Type":"ContainerDied","Data":"037ce715335264026e41bc2fbe5d05f41cbc7fc8c158cde22dd67a1f1c227123"} Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.149163 4651 scope.go:117] "RemoveContainer" containerID="d261e942118d66979e4108db8d86a64b96d0bb437acdfd1c0130c3a62af2635d" Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.163564 4651 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/29a549a6-e4d5-41c6-8042-d6a88f17fa94-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.176321 4651 scope.go:117] "RemoveContainer" containerID="069ff3e520fa9810a7879f49294ab184f3ea5b81ae4dfe4ac1e021c70b5be95d" Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.194966 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.19494893 podStartE2EDuration="2.19494893s" podCreationTimestamp="2025-10-11 05:11:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:11:05.163923866 +0000 UTC m=+1186.060156672" watchObservedRunningTime="2025-10-11 05:11:05.19494893 +0000 UTC m=+1186.091181726" Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.196682 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.204144 4651 scope.go:117] "RemoveContainer" containerID="d261e942118d66979e4108db8d86a64b96d0bb437acdfd1c0130c3a62af2635d" Oct 11 05:11:05 crc kubenswrapper[4651]: E1011 05:11:05.204556 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d261e942118d66979e4108db8d86a64b96d0bb437acdfd1c0130c3a62af2635d\": container with ID starting with d261e942118d66979e4108db8d86a64b96d0bb437acdfd1c0130c3a62af2635d not found: ID does not exist" containerID="d261e942118d66979e4108db8d86a64b96d0bb437acdfd1c0130c3a62af2635d" Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.204597 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d261e942118d66979e4108db8d86a64b96d0bb437acdfd1c0130c3a62af2635d"} err="failed to get container status \"d261e942118d66979e4108db8d86a64b96d0bb437acdfd1c0130c3a62af2635d\": rpc error: code = NotFound desc = could not find container \"d261e942118d66979e4108db8d86a64b96d0bb437acdfd1c0130c3a62af2635d\": container with ID starting with d261e942118d66979e4108db8d86a64b96d0bb437acdfd1c0130c3a62af2635d not found: ID does not exist" Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.204622 4651 scope.go:117] "RemoveContainer" containerID="069ff3e520fa9810a7879f49294ab184f3ea5b81ae4dfe4ac1e021c70b5be95d" Oct 11 05:11:05 crc kubenswrapper[4651]: E1011 05:11:05.205592 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc 
Oct 11 05:11:05 crc kubenswrapper[4651]: E1011 05:11:05.205592 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"069ff3e520fa9810a7879f49294ab184f3ea5b81ae4dfe4ac1e021c70b5be95d\": container with ID starting with 069ff3e520fa9810a7879f49294ab184f3ea5b81ae4dfe4ac1e021c70b5be95d not found: ID does not exist" containerID="069ff3e520fa9810a7879f49294ab184f3ea5b81ae4dfe4ac1e021c70b5be95d"
Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.205620 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"069ff3e520fa9810a7879f49294ab184f3ea5b81ae4dfe4ac1e021c70b5be95d"} err="failed to get container status \"069ff3e520fa9810a7879f49294ab184f3ea5b81ae4dfe4ac1e021c70b5be95d\": rpc error: code = NotFound desc = could not find container \"069ff3e520fa9810a7879f49294ab184f3ea5b81ae4dfe4ac1e021c70b5be95d\": container with ID starting with 069ff3e520fa9810a7879f49294ab184f3ea5b81ae4dfe4ac1e021c70b5be95d not found: ID does not exist"
Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.211358 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.221870 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Oct 11 05:11:05 crc kubenswrapper[4651]: E1011 05:11:05.222238 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29a549a6-e4d5-41c6-8042-d6a88f17fa94" containerName="nova-metadata-log"
Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.222256 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="29a549a6-e4d5-41c6-8042-d6a88f17fa94" containerName="nova-metadata-log"
Oct 11 05:11:05 crc kubenswrapper[4651]: E1011 05:11:05.222275 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29a549a6-e4d5-41c6-8042-d6a88f17fa94" containerName="nova-metadata-metadata"
Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.222280 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="29a549a6-e4d5-41c6-8042-d6a88f17fa94" containerName="nova-metadata-metadata"
Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.222462 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="29a549a6-e4d5-41c6-8042-d6a88f17fa94" containerName="nova-metadata-metadata"
Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.222482 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="29a549a6-e4d5-41c6-8042-d6a88f17fa94" containerName="nova-metadata-log"
Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.223399 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.226117 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.227994 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.244754 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.266267 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d5fc748-aa4e-45c9-a268-9fdc8b2ae358-logs\") pod \"nova-metadata-0\" (UID: \"7d5fc748-aa4e-45c9-a268-9fdc8b2ae358\") " pod="openstack/nova-metadata-0"
Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.266570 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hq5t5\" (UniqueName: \"kubernetes.io/projected/7d5fc748-aa4e-45c9-a268-9fdc8b2ae358-kube-api-access-hq5t5\") pod \"nova-metadata-0\" (UID: \"7d5fc748-aa4e-45c9-a268-9fdc8b2ae358\") " pod="openstack/nova-metadata-0"
Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.266667 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d5fc748-aa4e-45c9-a268-9fdc8b2ae358-config-data\") pod \"nova-metadata-0\" (UID: \"7d5fc748-aa4e-45c9-a268-9fdc8b2ae358\") " pod="openstack/nova-metadata-0"
Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.266769 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d5fc748-aa4e-45c9-a268-9fdc8b2ae358-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"7d5fc748-aa4e-45c9-a268-9fdc8b2ae358\") " pod="openstack/nova-metadata-0"
Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.266863 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d5fc748-aa4e-45c9-a268-9fdc8b2ae358-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7d5fc748-aa4e-45c9-a268-9fdc8b2ae358\") " pod="openstack/nova-metadata-0"
Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.368834 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hq5t5\" (UniqueName: \"kubernetes.io/projected/7d5fc748-aa4e-45c9-a268-9fdc8b2ae358-kube-api-access-hq5t5\") pod \"nova-metadata-0\" (UID: \"7d5fc748-aa4e-45c9-a268-9fdc8b2ae358\") " pod="openstack/nova-metadata-0"
Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.368873 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d5fc748-aa4e-45c9-a268-9fdc8b2ae358-config-data\") pod \"nova-metadata-0\" (UID: \"7d5fc748-aa4e-45c9-a268-9fdc8b2ae358\") " pod="openstack/nova-metadata-0"
Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.368906 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d5fc748-aa4e-45c9-a268-9fdc8b2ae358-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"7d5fc748-aa4e-45c9-a268-9fdc8b2ae358\") " pod="openstack/nova-metadata-0"
Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.368924 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d5fc748-aa4e-45c9-a268-9fdc8b2ae358-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7d5fc748-aa4e-45c9-a268-9fdc8b2ae358\") " pod="openstack/nova-metadata-0"
Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.369010 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d5fc748-aa4e-45c9-a268-9fdc8b2ae358-logs\") pod \"nova-metadata-0\" (UID: \"7d5fc748-aa4e-45c9-a268-9fdc8b2ae358\") " pod="openstack/nova-metadata-0"
Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.369337 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d5fc748-aa4e-45c9-a268-9fdc8b2ae358-logs\") pod \"nova-metadata-0\" (UID: \"7d5fc748-aa4e-45c9-a268-9fdc8b2ae358\") " pod="openstack/nova-metadata-0"
Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.374062 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d5fc748-aa4e-45c9-a268-9fdc8b2ae358-config-data\") pod \"nova-metadata-0\" (UID: \"7d5fc748-aa4e-45c9-a268-9fdc8b2ae358\") " pod="openstack/nova-metadata-0"
Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.378175 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d5fc748-aa4e-45c9-a268-9fdc8b2ae358-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"7d5fc748-aa4e-45c9-a268-9fdc8b2ae358\") " pod="openstack/nova-metadata-0"
Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.380782 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d5fc748-aa4e-45c9-a268-9fdc8b2ae358-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7d5fc748-aa4e-45c9-a268-9fdc8b2ae358\") " pod="openstack/nova-metadata-0"
Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.389869 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hq5t5\" (UniqueName: \"kubernetes.io/projected/7d5fc748-aa4e-45c9-a268-9fdc8b2ae358-kube-api-access-hq5t5\") pod \"nova-metadata-0\" (UID: \"7d5fc748-aa4e-45c9-a268-9fdc8b2ae358\") " pod="openstack/nova-metadata-0"
Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.561110 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 11 05:11:05 crc kubenswrapper[4651]: I1011 05:11:05.879748 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29a549a6-e4d5-41c6-8042-d6a88f17fa94" path="/var/lib/kubelet/pods/29a549a6-e4d5-41c6-8042-d6a88f17fa94/volumes"
Oct 11 05:11:06 crc kubenswrapper[4651]: W1011 05:11:06.028011 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7d5fc748_aa4e_45c9_a268_9fdc8b2ae358.slice/crio-f3552f5b416813a592f9c82261b7c3b8d8ed808bb9b32c25e5be653feef13648 WatchSource:0}: Error finding container f3552f5b416813a592f9c82261b7c3b8d8ed808bb9b32c25e5be653feef13648: Status 404 returned error can't find the container with id f3552f5b416813a592f9c82261b7c3b8d8ed808bb9b32c25e5be653feef13648
Oct 11 05:11:06 crc kubenswrapper[4651]: I1011 05:11:06.029149 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Oct 11 05:11:06 crc kubenswrapper[4651]: I1011 05:11:06.162708 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7d5fc748-aa4e-45c9-a268-9fdc8b2ae358","Type":"ContainerStarted","Data":"f3552f5b416813a592f9c82261b7c3b8d8ed808bb9b32c25e5be653feef13648"}
Oct 11 05:11:07 crc kubenswrapper[4651]: I1011 05:11:07.173092 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7d5fc748-aa4e-45c9-a268-9fdc8b2ae358","Type":"ContainerStarted","Data":"c4f6321123533ddb72997635a8d7e956339671c97dd44a4898f08fcbfeef9842"}
Oct 11 05:11:07 crc kubenswrapper[4651]: I1011 05:11:07.173458 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7d5fc748-aa4e-45c9-a268-9fdc8b2ae358","Type":"ContainerStarted","Data":"e40608e75c33a0ead16846304faf8ee45d99b83003034d029193ac33e0694705"}
Oct 11 05:11:07 crc kubenswrapper[4651]: I1011 05:11:07.215715 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.215696482 podStartE2EDuration="2.215696482s" podCreationTimestamp="2025-10-11 05:11:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:11:07.213746874 +0000 UTC m=+1188.109979690" watchObservedRunningTime="2025-10-11 05:11:07.215696482 +0000 UTC m=+1188.111929278"
Oct 11 05:11:08 crc kubenswrapper[4651]: I1011 05:11:08.860845 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Oct 11 05:11:10 crc kubenswrapper[4651]: I1011 05:11:10.561427 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Oct 11 05:11:10 crc kubenswrapper[4651]: I1011 05:11:10.562851 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Oct 11 05:11:12 crc kubenswrapper[4651]: I1011 05:11:12.520529 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Oct 11 05:11:12 crc kubenswrapper[4651]: I1011 05:11:12.520848 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Oct 11 05:11:13 crc kubenswrapper[4651]: I1011 05:11:13.533183 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.206:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Oct 11 05:11:13 crc kubenswrapper[4651]: I1011 05:11:13.533140 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.206:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Oct 11 05:11:13 crc kubenswrapper[4651]: I1011 05:11:13.860521 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Oct 11 05:11:13 crc kubenswrapper[4651]: I1011 05:11:13.921120 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Oct 11 05:11:14 crc kubenswrapper[4651]: I1011 05:11:14.293373 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Oct 11 05:11:15 crc kubenswrapper[4651]: I1011 05:11:15.561333 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Oct 11 05:11:15 crc kubenswrapper[4651]: I1011 05:11:15.561874 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Oct 11 05:11:16 crc kubenswrapper[4651]: I1011 05:11:16.576023 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="7d5fc748-aa4e-45c9-a268-9fdc8b2ae358" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.208:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Oct 11 05:11:16 crc kubenswrapper[4651]: I1011 05:11:16.576032 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="7d5fc748-aa4e-45c9-a268-9fdc8b2ae358" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.208:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Oct 11 05:11:22 crc kubenswrapper[4651]: I1011 05:11:22.527607 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Oct 11 05:11:22 crc kubenswrapper[4651]: I1011 05:11:22.528171 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Oct 11 05:11:22 crc kubenswrapper[4651]: I1011 05:11:22.528696 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Oct 11 05:11:22 crc kubenswrapper[4651]: I1011 05:11:22.528718 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Oct 11 05:11:22 crc kubenswrapper[4651]: I1011 05:11:22.534316 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Oct 11 05:11:22 crc kubenswrapper[4651]: I1011 05:11:22.535036 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Oct 11 05:11:25 crc kubenswrapper[4651]: I1011 05:11:25.567359 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Oct 11 05:11:25 crc kubenswrapper[4651]: I1011 05:11:25.570069 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
pod="openstack/nova-metadata-0" Oct 11 05:11:25 crc kubenswrapper[4651]: I1011 05:11:25.583303 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 11 05:11:26 crc kubenswrapper[4651]: I1011 05:11:26.753695 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 11 05:11:37 crc kubenswrapper[4651]: I1011 05:11:37.326804 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 11 05:11:38 crc kubenswrapper[4651]: I1011 05:11:38.724045 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 11 05:11:41 crc kubenswrapper[4651]: I1011 05:11:41.482861 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="e73b125b-a52b-44bb-bbed-3a484f53a9cb" containerName="rabbitmq" containerID="cri-o://59c32683a820f73d4e339008dae95e311ab157d08bc2237c1620fcf487d08772" gracePeriod=604796 Oct 11 05:11:42 crc kubenswrapper[4651]: I1011 05:11:42.941877 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="dbfeee44-d2ad-4a4b-814f-916176925aaf" containerName="rabbitmq" containerID="cri-o://b59dfb8a86a393f1bb73d7fab6f1f3d33ac444d6555c3ef6692794f083ddbdfc" gracePeriod=604796 Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.188525 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.338302 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e73b125b-a52b-44bb-bbed-3a484f53a9cb-rabbitmq-confd\") pod \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.338359 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e73b125b-a52b-44bb-bbed-3a484f53a9cb-server-conf\") pod \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.338409 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e73b125b-a52b-44bb-bbed-3a484f53a9cb-plugins-conf\") pod \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.338500 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e73b125b-a52b-44bb-bbed-3a484f53a9cb-rabbitmq-erlang-cookie\") pod \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.338529 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e73b125b-a52b-44bb-bbed-3a484f53a9cb-rabbitmq-tls\") pod \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.338585 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage12-crc\") pod \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.338609 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bjgdc\" (UniqueName: \"kubernetes.io/projected/e73b125b-a52b-44bb-bbed-3a484f53a9cb-kube-api-access-bjgdc\") pod \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.338633 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e73b125b-a52b-44bb-bbed-3a484f53a9cb-rabbitmq-plugins\") pod \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.338663 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e73b125b-a52b-44bb-bbed-3a484f53a9cb-pod-info\") pod \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.338689 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e73b125b-a52b-44bb-bbed-3a484f53a9cb-erlang-cookie-secret\") pod \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.338756 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e73b125b-a52b-44bb-bbed-3a484f53a9cb-config-data\") pod \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\" (UID: \"e73b125b-a52b-44bb-bbed-3a484f53a9cb\") " Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.339574 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e73b125b-a52b-44bb-bbed-3a484f53a9cb-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "e73b125b-a52b-44bb-bbed-3a484f53a9cb" (UID: "e73b125b-a52b-44bb-bbed-3a484f53a9cb"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.339603 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e73b125b-a52b-44bb-bbed-3a484f53a9cb-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "e73b125b-a52b-44bb-bbed-3a484f53a9cb" (UID: "e73b125b-a52b-44bb-bbed-3a484f53a9cb"). InnerVolumeSpecName "rabbitmq-plugins". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.339763 4651 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e73b125b-a52b-44bb-bbed-3a484f53a9cb-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.339775 4651 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e73b125b-a52b-44bb-bbed-3a484f53a9cb-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.340039 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e73b125b-a52b-44bb-bbed-3a484f53a9cb-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "e73b125b-a52b-44bb-bbed-3a484f53a9cb" (UID: "e73b125b-a52b-44bb-bbed-3a484f53a9cb"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.344598 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e73b125b-a52b-44bb-bbed-3a484f53a9cb-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "e73b125b-a52b-44bb-bbed-3a484f53a9cb" (UID: "e73b125b-a52b-44bb-bbed-3a484f53a9cb"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.344665 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e73b125b-a52b-44bb-bbed-3a484f53a9cb-kube-api-access-bjgdc" (OuterVolumeSpecName: "kube-api-access-bjgdc") pod "e73b125b-a52b-44bb-bbed-3a484f53a9cb" (UID: "e73b125b-a52b-44bb-bbed-3a484f53a9cb"). InnerVolumeSpecName "kube-api-access-bjgdc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.346544 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "persistence") pod "e73b125b-a52b-44bb-bbed-3a484f53a9cb" (UID: "e73b125b-a52b-44bb-bbed-3a484f53a9cb"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.350313 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e73b125b-a52b-44bb-bbed-3a484f53a9cb-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "e73b125b-a52b-44bb-bbed-3a484f53a9cb" (UID: "e73b125b-a52b-44bb-bbed-3a484f53a9cb"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.366973 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/e73b125b-a52b-44bb-bbed-3a484f53a9cb-pod-info" (OuterVolumeSpecName: "pod-info") pod "e73b125b-a52b-44bb-bbed-3a484f53a9cb" (UID: "e73b125b-a52b-44bb-bbed-3a484f53a9cb"). InnerVolumeSpecName "pod-info". 
PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.397099 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e73b125b-a52b-44bb-bbed-3a484f53a9cb-config-data" (OuterVolumeSpecName: "config-data") pod "e73b125b-a52b-44bb-bbed-3a484f53a9cb" (UID: "e73b125b-a52b-44bb-bbed-3a484f53a9cb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.406463 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e73b125b-a52b-44bb-bbed-3a484f53a9cb-server-conf" (OuterVolumeSpecName: "server-conf") pod "e73b125b-a52b-44bb-bbed-3a484f53a9cb" (UID: "e73b125b-a52b-44bb-bbed-3a484f53a9cb"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.441645 4651 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e73b125b-a52b-44bb-bbed-3a484f53a9cb-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.441671 4651 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e73b125b-a52b-44bb-bbed-3a484f53a9cb-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.441694 4651 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.441705 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bjgdc\" (UniqueName: \"kubernetes.io/projected/e73b125b-a52b-44bb-bbed-3a484f53a9cb-kube-api-access-bjgdc\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.441716 4651 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e73b125b-a52b-44bb-bbed-3a484f53a9cb-pod-info\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.441725 4651 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e73b125b-a52b-44bb-bbed-3a484f53a9cb-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.441735 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e73b125b-a52b-44bb-bbed-3a484f53a9cb-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.441746 4651 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e73b125b-a52b-44bb-bbed-3a484f53a9cb-server-conf\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.459461 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e73b125b-a52b-44bb-bbed-3a484f53a9cb-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "e73b125b-a52b-44bb-bbed-3a484f53a9cb" (UID: "e73b125b-a52b-44bb-bbed-3a484f53a9cb"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.463201 4651 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.542955 4651 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e73b125b-a52b-44bb-bbed-3a484f53a9cb-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.542989 4651 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.595957 4651 generic.go:334] "Generic (PLEG): container finished" podID="e73b125b-a52b-44bb-bbed-3a484f53a9cb" containerID="59c32683a820f73d4e339008dae95e311ab157d08bc2237c1620fcf487d08772" exitCode=0 Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.596004 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"e73b125b-a52b-44bb-bbed-3a484f53a9cb","Type":"ContainerDied","Data":"59c32683a820f73d4e339008dae95e311ab157d08bc2237c1620fcf487d08772"} Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.596021 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.596033 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"e73b125b-a52b-44bb-bbed-3a484f53a9cb","Type":"ContainerDied","Data":"d48c4a545849869757ea0ab6851826f2a45b302ab86e6bb3fe9e35d0282e8add"} Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.596052 4651 scope.go:117] "RemoveContainer" containerID="59c32683a820f73d4e339008dae95e311ab157d08bc2237c1620fcf487d08772" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.627353 4651 scope.go:117] "RemoveContainer" containerID="d0718945f619181b6b1f3000cd1eaed2003512a29de9e8f1b43985057d9b10d7" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.630096 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.637832 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.659198 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 11 05:11:48 crc kubenswrapper[4651]: E1011 05:11:48.659596 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e73b125b-a52b-44bb-bbed-3a484f53a9cb" containerName="setup-container" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.659614 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="e73b125b-a52b-44bb-bbed-3a484f53a9cb" containerName="setup-container" Oct 11 05:11:48 crc kubenswrapper[4651]: E1011 05:11:48.659633 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e73b125b-a52b-44bb-bbed-3a484f53a9cb" containerName="rabbitmq" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.659640 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="e73b125b-a52b-44bb-bbed-3a484f53a9cb" containerName="rabbitmq" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.659865 4651 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="e73b125b-a52b-44bb-bbed-3a484f53a9cb" containerName="rabbitmq" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.660245 4651 scope.go:117] "RemoveContainer" containerID="59c32683a820f73d4e339008dae95e311ab157d08bc2237c1620fcf487d08772" Oct 11 05:11:48 crc kubenswrapper[4651]: E1011 05:11:48.660615 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59c32683a820f73d4e339008dae95e311ab157d08bc2237c1620fcf487d08772\": container with ID starting with 59c32683a820f73d4e339008dae95e311ab157d08bc2237c1620fcf487d08772 not found: ID does not exist" containerID="59c32683a820f73d4e339008dae95e311ab157d08bc2237c1620fcf487d08772" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.660645 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59c32683a820f73d4e339008dae95e311ab157d08bc2237c1620fcf487d08772"} err="failed to get container status \"59c32683a820f73d4e339008dae95e311ab157d08bc2237c1620fcf487d08772\": rpc error: code = NotFound desc = could not find container \"59c32683a820f73d4e339008dae95e311ab157d08bc2237c1620fcf487d08772\": container with ID starting with 59c32683a820f73d4e339008dae95e311ab157d08bc2237c1620fcf487d08772 not found: ID does not exist" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.660670 4651 scope.go:117] "RemoveContainer" containerID="d0718945f619181b6b1f3000cd1eaed2003512a29de9e8f1b43985057d9b10d7" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.660909 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: E1011 05:11:48.662079 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0718945f619181b6b1f3000cd1eaed2003512a29de9e8f1b43985057d9b10d7\": container with ID starting with d0718945f619181b6b1f3000cd1eaed2003512a29de9e8f1b43985057d9b10d7 not found: ID does not exist" containerID="d0718945f619181b6b1f3000cd1eaed2003512a29de9e8f1b43985057d9b10d7" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.662114 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0718945f619181b6b1f3000cd1eaed2003512a29de9e8f1b43985057d9b10d7"} err="failed to get container status \"d0718945f619181b6b1f3000cd1eaed2003512a29de9e8f1b43985057d9b10d7\": rpc error: code = NotFound desc = could not find container \"d0718945f619181b6b1f3000cd1eaed2003512a29de9e8f1b43985057d9b10d7\": container with ID starting with d0718945f619181b6b1f3000cd1eaed2003512a29de9e8f1b43985057d9b10d7 not found: ID does not exist" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.664071 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.664232 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.664359 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-jwjmf" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.665427 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.665665 4651 reflector.go:368] Caches populated for *v1.Secret from 
object-"openstack"/"cert-rabbitmq-svc" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.665802 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.666441 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.671300 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.856774 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f157ed46-75e8-4f03-b4ec-1234385015bd-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"f157ed46-75e8-4f03-b4ec-1234385015bd\") " pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.856833 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f157ed46-75e8-4f03-b4ec-1234385015bd-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"f157ed46-75e8-4f03-b4ec-1234385015bd\") " pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.856886 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtmvc\" (UniqueName: \"kubernetes.io/projected/f157ed46-75e8-4f03-b4ec-1234385015bd-kube-api-access-jtmvc\") pod \"rabbitmq-server-0\" (UID: \"f157ed46-75e8-4f03-b4ec-1234385015bd\") " pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.856939 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f157ed46-75e8-4f03-b4ec-1234385015bd-pod-info\") pod \"rabbitmq-server-0\" (UID: \"f157ed46-75e8-4f03-b4ec-1234385015bd\") " pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.856967 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f157ed46-75e8-4f03-b4ec-1234385015bd-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"f157ed46-75e8-4f03-b4ec-1234385015bd\") " pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.857022 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"f157ed46-75e8-4f03-b4ec-1234385015bd\") " pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.857090 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f157ed46-75e8-4f03-b4ec-1234385015bd-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"f157ed46-75e8-4f03-b4ec-1234385015bd\") " pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.857138 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f157ed46-75e8-4f03-b4ec-1234385015bd-config-data\") pod \"rabbitmq-server-0\" (UID: 
\"f157ed46-75e8-4f03-b4ec-1234385015bd\") " pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.857216 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f157ed46-75e8-4f03-b4ec-1234385015bd-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"f157ed46-75e8-4f03-b4ec-1234385015bd\") " pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.857259 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f157ed46-75e8-4f03-b4ec-1234385015bd-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"f157ed46-75e8-4f03-b4ec-1234385015bd\") " pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.857274 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f157ed46-75e8-4f03-b4ec-1234385015bd-server-conf\") pod \"rabbitmq-server-0\" (UID: \"f157ed46-75e8-4f03-b4ec-1234385015bd\") " pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.959118 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f157ed46-75e8-4f03-b4ec-1234385015bd-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"f157ed46-75e8-4f03-b4ec-1234385015bd\") " pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.959166 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f157ed46-75e8-4f03-b4ec-1234385015bd-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"f157ed46-75e8-4f03-b4ec-1234385015bd\") " pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.959199 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtmvc\" (UniqueName: \"kubernetes.io/projected/f157ed46-75e8-4f03-b4ec-1234385015bd-kube-api-access-jtmvc\") pod \"rabbitmq-server-0\" (UID: \"f157ed46-75e8-4f03-b4ec-1234385015bd\") " pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.959225 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f157ed46-75e8-4f03-b4ec-1234385015bd-pod-info\") pod \"rabbitmq-server-0\" (UID: \"f157ed46-75e8-4f03-b4ec-1234385015bd\") " pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.959254 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f157ed46-75e8-4f03-b4ec-1234385015bd-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"f157ed46-75e8-4f03-b4ec-1234385015bd\") " pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.959281 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"f157ed46-75e8-4f03-b4ec-1234385015bd\") " pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.959325 4651 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f157ed46-75e8-4f03-b4ec-1234385015bd-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"f157ed46-75e8-4f03-b4ec-1234385015bd\") " pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.959381 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f157ed46-75e8-4f03-b4ec-1234385015bd-config-data\") pod \"rabbitmq-server-0\" (UID: \"f157ed46-75e8-4f03-b4ec-1234385015bd\") " pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.959536 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f157ed46-75e8-4f03-b4ec-1234385015bd-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"f157ed46-75e8-4f03-b4ec-1234385015bd\") " pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.959668 4651 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"f157ed46-75e8-4f03-b4ec-1234385015bd\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.959714 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f157ed46-75e8-4f03-b4ec-1234385015bd-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"f157ed46-75e8-4f03-b4ec-1234385015bd\") " pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.960055 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f157ed46-75e8-4f03-b4ec-1234385015bd-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"f157ed46-75e8-4f03-b4ec-1234385015bd\") " pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.960083 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f157ed46-75e8-4f03-b4ec-1234385015bd-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"f157ed46-75e8-4f03-b4ec-1234385015bd\") " pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.960138 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f157ed46-75e8-4f03-b4ec-1234385015bd-server-conf\") pod \"rabbitmq-server-0\" (UID: \"f157ed46-75e8-4f03-b4ec-1234385015bd\") " pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.960292 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f157ed46-75e8-4f03-b4ec-1234385015bd-config-data\") pod \"rabbitmq-server-0\" (UID: \"f157ed46-75e8-4f03-b4ec-1234385015bd\") " pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.960385 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f157ed46-75e8-4f03-b4ec-1234385015bd-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"f157ed46-75e8-4f03-b4ec-1234385015bd\") " 
pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.961376 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f157ed46-75e8-4f03-b4ec-1234385015bd-server-conf\") pod \"rabbitmq-server-0\" (UID: \"f157ed46-75e8-4f03-b4ec-1234385015bd\") " pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.964131 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f157ed46-75e8-4f03-b4ec-1234385015bd-pod-info\") pod \"rabbitmq-server-0\" (UID: \"f157ed46-75e8-4f03-b4ec-1234385015bd\") " pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.964769 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f157ed46-75e8-4f03-b4ec-1234385015bd-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"f157ed46-75e8-4f03-b4ec-1234385015bd\") " pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.968478 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f157ed46-75e8-4f03-b4ec-1234385015bd-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"f157ed46-75e8-4f03-b4ec-1234385015bd\") " pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.969006 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f157ed46-75e8-4f03-b4ec-1234385015bd-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"f157ed46-75e8-4f03-b4ec-1234385015bd\") " pod="openstack/rabbitmq-server-0" Oct 11 05:11:48 crc kubenswrapper[4651]: I1011 05:11:48.995755 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtmvc\" (UniqueName: \"kubernetes.io/projected/f157ed46-75e8-4f03-b4ec-1234385015bd-kube-api-access-jtmvc\") pod \"rabbitmq-server-0\" (UID: \"f157ed46-75e8-4f03-b4ec-1234385015bd\") " pod="openstack/rabbitmq-server-0" Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.001931 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"f157ed46-75e8-4f03-b4ec-1234385015bd\") " pod="openstack/rabbitmq-server-0" Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.293121 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.499601 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.609132 4651 generic.go:334] "Generic (PLEG): container finished" podID="dbfeee44-d2ad-4a4b-814f-916176925aaf" containerID="b59dfb8a86a393f1bb73d7fab6f1f3d33ac444d6555c3ef6692794f083ddbdfc" exitCode=0 Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.609230 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.609227 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"dbfeee44-d2ad-4a4b-814f-916176925aaf","Type":"ContainerDied","Data":"b59dfb8a86a393f1bb73d7fab6f1f3d33ac444d6555c3ef6692794f083ddbdfc"} Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.609364 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"dbfeee44-d2ad-4a4b-814f-916176925aaf","Type":"ContainerDied","Data":"eec70c36cad1d1c991e6ef4a4a83710cdc68f8a0b043be49b608c2cd7d765c0c"} Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.609414 4651 scope.go:117] "RemoveContainer" containerID="b59dfb8a86a393f1bb73d7fab6f1f3d33ac444d6555c3ef6692794f083ddbdfc" Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.639992 4651 scope.go:117] "RemoveContainer" containerID="c50a4b72004db94f4ca1044699f14bb5386093354b3b69c9543ac53d702d890e" Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.663999 4651 scope.go:117] "RemoveContainer" containerID="b59dfb8a86a393f1bb73d7fab6f1f3d33ac444d6555c3ef6692794f083ddbdfc" Oct 11 05:11:49 crc kubenswrapper[4651]: E1011 05:11:49.664465 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b59dfb8a86a393f1bb73d7fab6f1f3d33ac444d6555c3ef6692794f083ddbdfc\": container with ID starting with b59dfb8a86a393f1bb73d7fab6f1f3d33ac444d6555c3ef6692794f083ddbdfc not found: ID does not exist" containerID="b59dfb8a86a393f1bb73d7fab6f1f3d33ac444d6555c3ef6692794f083ddbdfc" Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.664538 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b59dfb8a86a393f1bb73d7fab6f1f3d33ac444d6555c3ef6692794f083ddbdfc"} err="failed to get container status \"b59dfb8a86a393f1bb73d7fab6f1f3d33ac444d6555c3ef6692794f083ddbdfc\": rpc error: code = NotFound desc = could not find container \"b59dfb8a86a393f1bb73d7fab6f1f3d33ac444d6555c3ef6692794f083ddbdfc\": container with ID starting with b59dfb8a86a393f1bb73d7fab6f1f3d33ac444d6555c3ef6692794f083ddbdfc not found: ID does not exist" Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.664560 4651 scope.go:117] "RemoveContainer" containerID="c50a4b72004db94f4ca1044699f14bb5386093354b3b69c9543ac53d702d890e" Oct 11 05:11:49 crc kubenswrapper[4651]: E1011 05:11:49.665092 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c50a4b72004db94f4ca1044699f14bb5386093354b3b69c9543ac53d702d890e\": container with ID starting with c50a4b72004db94f4ca1044699f14bb5386093354b3b69c9543ac53d702d890e not found: ID does not exist" containerID="c50a4b72004db94f4ca1044699f14bb5386093354b3b69c9543ac53d702d890e" Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.665137 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c50a4b72004db94f4ca1044699f14bb5386093354b3b69c9543ac53d702d890e"} err="failed to get container status \"c50a4b72004db94f4ca1044699f14bb5386093354b3b69c9543ac53d702d890e\": rpc error: code = NotFound desc = could not find container \"c50a4b72004db94f4ca1044699f14bb5386093354b3b69c9543ac53d702d890e\": container with ID starting with c50a4b72004db94f4ca1044699f14bb5386093354b3b69c9543ac53d702d890e not found: ID does not exist" Oct 11 05:11:49 crc kubenswrapper[4651]: 
I1011 05:11:49.676417 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/dbfeee44-d2ad-4a4b-814f-916176925aaf-erlang-cookie-secret\") pod \"dbfeee44-d2ad-4a4b-814f-916176925aaf\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.676461 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/dbfeee44-d2ad-4a4b-814f-916176925aaf-rabbitmq-erlang-cookie\") pod \"dbfeee44-d2ad-4a4b-814f-916176925aaf\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.676507 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/dbfeee44-d2ad-4a4b-814f-916176925aaf-server-conf\") pod \"dbfeee44-d2ad-4a4b-814f-916176925aaf\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.676556 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/dbfeee44-d2ad-4a4b-814f-916176925aaf-rabbitmq-tls\") pod \"dbfeee44-d2ad-4a4b-814f-916176925aaf\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.676660 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/dbfeee44-d2ad-4a4b-814f-916176925aaf-plugins-conf\") pod \"dbfeee44-d2ad-4a4b-814f-916176925aaf\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.676692 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rz57r\" (UniqueName: \"kubernetes.io/projected/dbfeee44-d2ad-4a4b-814f-916176925aaf-kube-api-access-rz57r\") pod \"dbfeee44-d2ad-4a4b-814f-916176925aaf\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.676712 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/dbfeee44-d2ad-4a4b-814f-916176925aaf-rabbitmq-confd\") pod \"dbfeee44-d2ad-4a4b-814f-916176925aaf\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.676736 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/dbfeee44-d2ad-4a4b-814f-916176925aaf-rabbitmq-plugins\") pod \"dbfeee44-d2ad-4a4b-814f-916176925aaf\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.676792 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/dbfeee44-d2ad-4a4b-814f-916176925aaf-pod-info\") pod \"dbfeee44-d2ad-4a4b-814f-916176925aaf\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.676876 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dbfeee44-d2ad-4a4b-814f-916176925aaf-config-data\") pod \"dbfeee44-d2ad-4a4b-814f-916176925aaf\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " Oct 11 05:11:49 crc kubenswrapper[4651]: 
I1011 05:11:49.676918 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"dbfeee44-d2ad-4a4b-814f-916176925aaf\" (UID: \"dbfeee44-d2ad-4a4b-814f-916176925aaf\") " Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.677427 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dbfeee44-d2ad-4a4b-814f-916176925aaf-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "dbfeee44-d2ad-4a4b-814f-916176925aaf" (UID: "dbfeee44-d2ad-4a4b-814f-916176925aaf"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.678043 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dbfeee44-d2ad-4a4b-814f-916176925aaf-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "dbfeee44-d2ad-4a4b-814f-916176925aaf" (UID: "dbfeee44-d2ad-4a4b-814f-916176925aaf"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.678484 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dbfeee44-d2ad-4a4b-814f-916176925aaf-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "dbfeee44-d2ad-4a4b-814f-916176925aaf" (UID: "dbfeee44-d2ad-4a4b-814f-916176925aaf"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.681983 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/dbfeee44-d2ad-4a4b-814f-916176925aaf-pod-info" (OuterVolumeSpecName: "pod-info") pod "dbfeee44-d2ad-4a4b-814f-916176925aaf" (UID: "dbfeee44-d2ad-4a4b-814f-916176925aaf"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.682024 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "persistence") pod "dbfeee44-d2ad-4a4b-814f-916176925aaf" (UID: "dbfeee44-d2ad-4a4b-814f-916176925aaf"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.682988 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dbfeee44-d2ad-4a4b-814f-916176925aaf-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "dbfeee44-d2ad-4a4b-814f-916176925aaf" (UID: "dbfeee44-d2ad-4a4b-814f-916176925aaf"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.683760 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbfeee44-d2ad-4a4b-814f-916176925aaf-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "dbfeee44-d2ad-4a4b-814f-916176925aaf" (UID: "dbfeee44-d2ad-4a4b-814f-916176925aaf"). InnerVolumeSpecName "erlang-cookie-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.684186 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dbfeee44-d2ad-4a4b-814f-916176925aaf-kube-api-access-rz57r" (OuterVolumeSpecName: "kube-api-access-rz57r") pod "dbfeee44-d2ad-4a4b-814f-916176925aaf" (UID: "dbfeee44-d2ad-4a4b-814f-916176925aaf"). InnerVolumeSpecName "kube-api-access-rz57r". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.706025 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dbfeee44-d2ad-4a4b-814f-916176925aaf-config-data" (OuterVolumeSpecName: "config-data") pod "dbfeee44-d2ad-4a4b-814f-916176925aaf" (UID: "dbfeee44-d2ad-4a4b-814f-916176925aaf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.739890 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dbfeee44-d2ad-4a4b-814f-916176925aaf-server-conf" (OuterVolumeSpecName: "server-conf") pod "dbfeee44-d2ad-4a4b-814f-916176925aaf" (UID: "dbfeee44-d2ad-4a4b-814f-916176925aaf"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.779624 4651 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/dbfeee44-d2ad-4a4b-814f-916176925aaf-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.779658 4651 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/dbfeee44-d2ad-4a4b-814f-916176925aaf-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.779668 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rz57r\" (UniqueName: \"kubernetes.io/projected/dbfeee44-d2ad-4a4b-814f-916176925aaf-kube-api-access-rz57r\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.779680 4651 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/dbfeee44-d2ad-4a4b-814f-916176925aaf-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.779688 4651 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/dbfeee44-d2ad-4a4b-814f-916176925aaf-pod-info\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.779697 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dbfeee44-d2ad-4a4b-814f-916176925aaf-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.779716 4651 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.779725 4651 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/dbfeee44-d2ad-4a4b-814f-916176925aaf-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:49 crc 
kubenswrapper[4651]: I1011 05:11:49.779734 4651 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/dbfeee44-d2ad-4a4b-814f-916176925aaf-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.779742 4651 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/dbfeee44-d2ad-4a4b-814f-916176925aaf-server-conf\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.795491 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dbfeee44-d2ad-4a4b-814f-916176925aaf-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "dbfeee44-d2ad-4a4b-814f-916176925aaf" (UID: "dbfeee44-d2ad-4a4b-814f-916176925aaf"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.800501 4651 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.846653 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.881621 4651 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.881658 4651 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/dbfeee44-d2ad-4a4b-814f-916176925aaf-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 11 05:11:49 crc kubenswrapper[4651]: I1011 05:11:49.903476 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e73b125b-a52b-44bb-bbed-3a484f53a9cb" path="/var/lib/kubelet/pods/e73b125b-a52b-44bb-bbed-3a484f53a9cb/volumes" Oct 11 05:11:50 crc kubenswrapper[4651]: I1011 05:11:50.625581 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"f157ed46-75e8-4f03-b4ec-1234385015bd","Type":"ContainerStarted","Data":"32f2cf52071d766e4f899ef45e6675b6da7588236b0d4ee825c61552cec4ce46"} Oct 11 05:11:51 crc kubenswrapper[4651]: I1011 05:11:51.640201 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"f157ed46-75e8-4f03-b4ec-1234385015bd","Type":"ContainerStarted","Data":"e8c9c7b02cc1d55fc2976f945c124ba54405fddf552bfc3cf3276b05d325b2b0"} Oct 11 05:11:53 crc kubenswrapper[4651]: I1011 05:11:53.531737 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-bxvjf"] Oct 11 05:11:53 crc kubenswrapper[4651]: E1011 05:11:53.532431 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbfeee44-d2ad-4a4b-814f-916176925aaf" containerName="rabbitmq" Oct 11 05:11:53 crc kubenswrapper[4651]: I1011 05:11:53.532446 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbfeee44-d2ad-4a4b-814f-916176925aaf" containerName="rabbitmq" Oct 11 05:11:53 crc kubenswrapper[4651]: E1011 05:11:53.532471 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbfeee44-d2ad-4a4b-814f-916176925aaf" containerName="setup-container" Oct 11 05:11:53 crc kubenswrapper[4651]: I1011 05:11:53.532481 4651 
state_mem.go:107] "Deleted CPUSet assignment" podUID="dbfeee44-d2ad-4a4b-814f-916176925aaf" containerName="setup-container" Oct 11 05:11:53 crc kubenswrapper[4651]: I1011 05:11:53.532693 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbfeee44-d2ad-4a4b-814f-916176925aaf" containerName="rabbitmq" Oct 11 05:11:53 crc kubenswrapper[4651]: I1011 05:11:53.533674 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5576978c7c-bxvjf" Oct 11 05:11:53 crc kubenswrapper[4651]: I1011 05:11:53.535645 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Oct 11 05:11:53 crc kubenswrapper[4651]: I1011 05:11:53.550008 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-bxvjf"] Oct 11 05:11:53 crc kubenswrapper[4651]: I1011 05:11:53.657256 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-dns-swift-storage-0\") pod \"dnsmasq-dns-5576978c7c-bxvjf\" (UID: \"f70e3461-5292-4c8e-b369-f54726da8b01\") " pod="openstack/dnsmasq-dns-5576978c7c-bxvjf" Oct 11 05:11:53 crc kubenswrapper[4651]: I1011 05:11:53.657305 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-config\") pod \"dnsmasq-dns-5576978c7c-bxvjf\" (UID: \"f70e3461-5292-4c8e-b369-f54726da8b01\") " pod="openstack/dnsmasq-dns-5576978c7c-bxvjf" Oct 11 05:11:53 crc kubenswrapper[4651]: I1011 05:11:53.657330 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-dns-svc\") pod \"dnsmasq-dns-5576978c7c-bxvjf\" (UID: \"f70e3461-5292-4c8e-b369-f54726da8b01\") " pod="openstack/dnsmasq-dns-5576978c7c-bxvjf" Oct 11 05:11:53 crc kubenswrapper[4651]: I1011 05:11:53.657350 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-openstack-edpm-ipam\") pod \"dnsmasq-dns-5576978c7c-bxvjf\" (UID: \"f70e3461-5292-4c8e-b369-f54726da8b01\") " pod="openstack/dnsmasq-dns-5576978c7c-bxvjf" Oct 11 05:11:53 crc kubenswrapper[4651]: I1011 05:11:53.657368 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9lcfj\" (UniqueName: \"kubernetes.io/projected/f70e3461-5292-4c8e-b369-f54726da8b01-kube-api-access-9lcfj\") pod \"dnsmasq-dns-5576978c7c-bxvjf\" (UID: \"f70e3461-5292-4c8e-b369-f54726da8b01\") " pod="openstack/dnsmasq-dns-5576978c7c-bxvjf" Oct 11 05:11:53 crc kubenswrapper[4651]: I1011 05:11:53.657485 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-ovsdbserver-sb\") pod \"dnsmasq-dns-5576978c7c-bxvjf\" (UID: \"f70e3461-5292-4c8e-b369-f54726da8b01\") " pod="openstack/dnsmasq-dns-5576978c7c-bxvjf" Oct 11 05:11:53 crc kubenswrapper[4651]: I1011 05:11:53.657655 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-ovsdbserver-nb\") pod \"dnsmasq-dns-5576978c7c-bxvjf\" (UID: \"f70e3461-5292-4c8e-b369-f54726da8b01\") " pod="openstack/dnsmasq-dns-5576978c7c-bxvjf" Oct 11 05:11:53 crc kubenswrapper[4651]: I1011 05:11:53.760288 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-ovsdbserver-nb\") pod \"dnsmasq-dns-5576978c7c-bxvjf\" (UID: \"f70e3461-5292-4c8e-b369-f54726da8b01\") " pod="openstack/dnsmasq-dns-5576978c7c-bxvjf" Oct 11 05:11:53 crc kubenswrapper[4651]: I1011 05:11:53.760546 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-dns-swift-storage-0\") pod \"dnsmasq-dns-5576978c7c-bxvjf\" (UID: \"f70e3461-5292-4c8e-b369-f54726da8b01\") " pod="openstack/dnsmasq-dns-5576978c7c-bxvjf" Oct 11 05:11:53 crc kubenswrapper[4651]: I1011 05:11:53.760582 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-config\") pod \"dnsmasq-dns-5576978c7c-bxvjf\" (UID: \"f70e3461-5292-4c8e-b369-f54726da8b01\") " pod="openstack/dnsmasq-dns-5576978c7c-bxvjf" Oct 11 05:11:53 crc kubenswrapper[4651]: I1011 05:11:53.761169 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-dns-svc\") pod \"dnsmasq-dns-5576978c7c-bxvjf\" (UID: \"f70e3461-5292-4c8e-b369-f54726da8b01\") " pod="openstack/dnsmasq-dns-5576978c7c-bxvjf" Oct 11 05:11:53 crc kubenswrapper[4651]: I1011 05:11:53.761208 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-openstack-edpm-ipam\") pod \"dnsmasq-dns-5576978c7c-bxvjf\" (UID: \"f70e3461-5292-4c8e-b369-f54726da8b01\") " pod="openstack/dnsmasq-dns-5576978c7c-bxvjf" Oct 11 05:11:53 crc kubenswrapper[4651]: I1011 05:11:53.761233 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9lcfj\" (UniqueName: \"kubernetes.io/projected/f70e3461-5292-4c8e-b369-f54726da8b01-kube-api-access-9lcfj\") pod \"dnsmasq-dns-5576978c7c-bxvjf\" (UID: \"f70e3461-5292-4c8e-b369-f54726da8b01\") " pod="openstack/dnsmasq-dns-5576978c7c-bxvjf" Oct 11 05:11:53 crc kubenswrapper[4651]: I1011 05:11:53.761262 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-ovsdbserver-sb\") pod \"dnsmasq-dns-5576978c7c-bxvjf\" (UID: \"f70e3461-5292-4c8e-b369-f54726da8b01\") " pod="openstack/dnsmasq-dns-5576978c7c-bxvjf" Oct 11 05:11:53 crc kubenswrapper[4651]: I1011 05:11:53.761263 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-ovsdbserver-nb\") pod \"dnsmasq-dns-5576978c7c-bxvjf\" (UID: \"f70e3461-5292-4c8e-b369-f54726da8b01\") " pod="openstack/dnsmasq-dns-5576978c7c-bxvjf" Oct 11 05:11:53 crc kubenswrapper[4651]: I1011 05:11:53.761464 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-config\") 
pod \"dnsmasq-dns-5576978c7c-bxvjf\" (UID: \"f70e3461-5292-4c8e-b369-f54726da8b01\") " pod="openstack/dnsmasq-dns-5576978c7c-bxvjf" Oct 11 05:11:53 crc kubenswrapper[4651]: I1011 05:11:53.761644 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-dns-swift-storage-0\") pod \"dnsmasq-dns-5576978c7c-bxvjf\" (UID: \"f70e3461-5292-4c8e-b369-f54726da8b01\") " pod="openstack/dnsmasq-dns-5576978c7c-bxvjf" Oct 11 05:11:53 crc kubenswrapper[4651]: I1011 05:11:53.761950 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-dns-svc\") pod \"dnsmasq-dns-5576978c7c-bxvjf\" (UID: \"f70e3461-5292-4c8e-b369-f54726da8b01\") " pod="openstack/dnsmasq-dns-5576978c7c-bxvjf" Oct 11 05:11:53 crc kubenswrapper[4651]: I1011 05:11:53.762154 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-openstack-edpm-ipam\") pod \"dnsmasq-dns-5576978c7c-bxvjf\" (UID: \"f70e3461-5292-4c8e-b369-f54726da8b01\") " pod="openstack/dnsmasq-dns-5576978c7c-bxvjf" Oct 11 05:11:53 crc kubenswrapper[4651]: I1011 05:11:53.762478 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-ovsdbserver-sb\") pod \"dnsmasq-dns-5576978c7c-bxvjf\" (UID: \"f70e3461-5292-4c8e-b369-f54726da8b01\") " pod="openstack/dnsmasq-dns-5576978c7c-bxvjf" Oct 11 05:11:53 crc kubenswrapper[4651]: I1011 05:11:53.781805 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9lcfj\" (UniqueName: \"kubernetes.io/projected/f70e3461-5292-4c8e-b369-f54726da8b01-kube-api-access-9lcfj\") pod \"dnsmasq-dns-5576978c7c-bxvjf\" (UID: \"f70e3461-5292-4c8e-b369-f54726da8b01\") " pod="openstack/dnsmasq-dns-5576978c7c-bxvjf" Oct 11 05:11:53 crc kubenswrapper[4651]: I1011 05:11:53.858512 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5576978c7c-bxvjf" Oct 11 05:11:54 crc kubenswrapper[4651]: I1011 05:11:54.377010 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-bxvjf"] Oct 11 05:11:54 crc kubenswrapper[4651]: W1011 05:11:54.387783 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf70e3461_5292_4c8e_b369_f54726da8b01.slice/crio-283e910b86a4224b615c619359a4f8cf32ac7d753a98311b0fdb54264d508ed4 WatchSource:0}: Error finding container 283e910b86a4224b615c619359a4f8cf32ac7d753a98311b0fdb54264d508ed4: Status 404 returned error can't find the container with id 283e910b86a4224b615c619359a4f8cf32ac7d753a98311b0fdb54264d508ed4 Oct 11 05:11:54 crc kubenswrapper[4651]: I1011 05:11:54.670707 4651 generic.go:334] "Generic (PLEG): container finished" podID="f70e3461-5292-4c8e-b369-f54726da8b01" containerID="4097aaeb0799c6e0e97e9efe419760acf6568de01d8b673c6baa184b1e09a886" exitCode=0 Oct 11 05:11:54 crc kubenswrapper[4651]: I1011 05:11:54.670786 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5576978c7c-bxvjf" event={"ID":"f70e3461-5292-4c8e-b369-f54726da8b01","Type":"ContainerDied","Data":"4097aaeb0799c6e0e97e9efe419760acf6568de01d8b673c6baa184b1e09a886"} Oct 11 05:11:54 crc kubenswrapper[4651]: I1011 05:11:54.671083 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5576978c7c-bxvjf" event={"ID":"f70e3461-5292-4c8e-b369-f54726da8b01","Type":"ContainerStarted","Data":"283e910b86a4224b615c619359a4f8cf32ac7d753a98311b0fdb54264d508ed4"} Oct 11 05:11:55 crc kubenswrapper[4651]: I1011 05:11:55.687942 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5576978c7c-bxvjf" event={"ID":"f70e3461-5292-4c8e-b369-f54726da8b01","Type":"ContainerStarted","Data":"35f98ba3f1b8ddcae5f617bd67eeaefb8ab9e2dc6c44e429a5a48b838643a835"} Oct 11 05:11:55 crc kubenswrapper[4651]: I1011 05:11:55.688555 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5576978c7c-bxvjf" Oct 11 05:11:55 crc kubenswrapper[4651]: I1011 05:11:55.729581 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5576978c7c-bxvjf" podStartSLOduration=2.729556042 podStartE2EDuration="2.729556042s" podCreationTimestamp="2025-10-11 05:11:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:11:55.716717565 +0000 UTC m=+1236.612950441" watchObservedRunningTime="2025-10-11 05:11:55.729556042 +0000 UTC m=+1236.625788868" Oct 11 05:12:03 crc kubenswrapper[4651]: I1011 05:12:03.861884 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5576978c7c-bxvjf" Oct 11 05:12:03 crc kubenswrapper[4651]: I1011 05:12:03.941006 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-twwgp"] Oct 11 05:12:03 crc kubenswrapper[4651]: I1011 05:12:03.941220 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c7b6c5df9-twwgp" podUID="07814860-7a0a-48b8-996c-3472c44897f3" containerName="dnsmasq-dns" containerID="cri-o://a3f7364e84451290999c632504bf3c62cfde20321efb17b0b64c351964edbd97" gracePeriod=10 Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.129294 4651 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/dnsmasq-dns-8c6f6df99-jzqc8"] Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.135151 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8c6f6df99-jzqc8" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.143547 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8c6f6df99-jzqc8"] Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.290174 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/6db2b942-f99f-417d-aff6-a37800db6a41-openstack-edpm-ipam\") pod \"dnsmasq-dns-8c6f6df99-jzqc8\" (UID: \"6db2b942-f99f-417d-aff6-a37800db6a41\") " pod="openstack/dnsmasq-dns-8c6f6df99-jzqc8" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.290224 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6db2b942-f99f-417d-aff6-a37800db6a41-config\") pod \"dnsmasq-dns-8c6f6df99-jzqc8\" (UID: \"6db2b942-f99f-417d-aff6-a37800db6a41\") " pod="openstack/dnsmasq-dns-8c6f6df99-jzqc8" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.290309 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6db2b942-f99f-417d-aff6-a37800db6a41-dns-svc\") pod \"dnsmasq-dns-8c6f6df99-jzqc8\" (UID: \"6db2b942-f99f-417d-aff6-a37800db6a41\") " pod="openstack/dnsmasq-dns-8c6f6df99-jzqc8" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.290371 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6db2b942-f99f-417d-aff6-a37800db6a41-ovsdbserver-nb\") pod \"dnsmasq-dns-8c6f6df99-jzqc8\" (UID: \"6db2b942-f99f-417d-aff6-a37800db6a41\") " pod="openstack/dnsmasq-dns-8c6f6df99-jzqc8" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.290393 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6db2b942-f99f-417d-aff6-a37800db6a41-dns-swift-storage-0\") pod \"dnsmasq-dns-8c6f6df99-jzqc8\" (UID: \"6db2b942-f99f-417d-aff6-a37800db6a41\") " pod="openstack/dnsmasq-dns-8c6f6df99-jzqc8" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.290434 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6db2b942-f99f-417d-aff6-a37800db6a41-ovsdbserver-sb\") pod \"dnsmasq-dns-8c6f6df99-jzqc8\" (UID: \"6db2b942-f99f-417d-aff6-a37800db6a41\") " pod="openstack/dnsmasq-dns-8c6f6df99-jzqc8" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.290582 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4mcc\" (UniqueName: \"kubernetes.io/projected/6db2b942-f99f-417d-aff6-a37800db6a41-kube-api-access-p4mcc\") pod \"dnsmasq-dns-8c6f6df99-jzqc8\" (UID: \"6db2b942-f99f-417d-aff6-a37800db6a41\") " pod="openstack/dnsmasq-dns-8c6f6df99-jzqc8" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.392640 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6db2b942-f99f-417d-aff6-a37800db6a41-ovsdbserver-nb\") pod \"dnsmasq-dns-8c6f6df99-jzqc8\" (UID: 
\"6db2b942-f99f-417d-aff6-a37800db6a41\") " pod="openstack/dnsmasq-dns-8c6f6df99-jzqc8" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.392698 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6db2b942-f99f-417d-aff6-a37800db6a41-dns-swift-storage-0\") pod \"dnsmasq-dns-8c6f6df99-jzqc8\" (UID: \"6db2b942-f99f-417d-aff6-a37800db6a41\") " pod="openstack/dnsmasq-dns-8c6f6df99-jzqc8" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.392758 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6db2b942-f99f-417d-aff6-a37800db6a41-ovsdbserver-sb\") pod \"dnsmasq-dns-8c6f6df99-jzqc8\" (UID: \"6db2b942-f99f-417d-aff6-a37800db6a41\") " pod="openstack/dnsmasq-dns-8c6f6df99-jzqc8" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.392789 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4mcc\" (UniqueName: \"kubernetes.io/projected/6db2b942-f99f-417d-aff6-a37800db6a41-kube-api-access-p4mcc\") pod \"dnsmasq-dns-8c6f6df99-jzqc8\" (UID: \"6db2b942-f99f-417d-aff6-a37800db6a41\") " pod="openstack/dnsmasq-dns-8c6f6df99-jzqc8" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.392845 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/6db2b942-f99f-417d-aff6-a37800db6a41-openstack-edpm-ipam\") pod \"dnsmasq-dns-8c6f6df99-jzqc8\" (UID: \"6db2b942-f99f-417d-aff6-a37800db6a41\") " pod="openstack/dnsmasq-dns-8c6f6df99-jzqc8" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.392868 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6db2b942-f99f-417d-aff6-a37800db6a41-config\") pod \"dnsmasq-dns-8c6f6df99-jzqc8\" (UID: \"6db2b942-f99f-417d-aff6-a37800db6a41\") " pod="openstack/dnsmasq-dns-8c6f6df99-jzqc8" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.392940 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6db2b942-f99f-417d-aff6-a37800db6a41-dns-svc\") pod \"dnsmasq-dns-8c6f6df99-jzqc8\" (UID: \"6db2b942-f99f-417d-aff6-a37800db6a41\") " pod="openstack/dnsmasq-dns-8c6f6df99-jzqc8" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.394210 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6db2b942-f99f-417d-aff6-a37800db6a41-dns-svc\") pod \"dnsmasq-dns-8c6f6df99-jzqc8\" (UID: \"6db2b942-f99f-417d-aff6-a37800db6a41\") " pod="openstack/dnsmasq-dns-8c6f6df99-jzqc8" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.394314 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6db2b942-f99f-417d-aff6-a37800db6a41-ovsdbserver-nb\") pod \"dnsmasq-dns-8c6f6df99-jzqc8\" (UID: \"6db2b942-f99f-417d-aff6-a37800db6a41\") " pod="openstack/dnsmasq-dns-8c6f6df99-jzqc8" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.394887 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/6db2b942-f99f-417d-aff6-a37800db6a41-openstack-edpm-ipam\") pod \"dnsmasq-dns-8c6f6df99-jzqc8\" (UID: \"6db2b942-f99f-417d-aff6-a37800db6a41\") " pod="openstack/dnsmasq-dns-8c6f6df99-jzqc8" Oct 11 
05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.395429 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6db2b942-f99f-417d-aff6-a37800db6a41-config\") pod \"dnsmasq-dns-8c6f6df99-jzqc8\" (UID: \"6db2b942-f99f-417d-aff6-a37800db6a41\") " pod="openstack/dnsmasq-dns-8c6f6df99-jzqc8" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.395453 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6db2b942-f99f-417d-aff6-a37800db6a41-ovsdbserver-sb\") pod \"dnsmasq-dns-8c6f6df99-jzqc8\" (UID: \"6db2b942-f99f-417d-aff6-a37800db6a41\") " pod="openstack/dnsmasq-dns-8c6f6df99-jzqc8" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.396145 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6db2b942-f99f-417d-aff6-a37800db6a41-dns-swift-storage-0\") pod \"dnsmasq-dns-8c6f6df99-jzqc8\" (UID: \"6db2b942-f99f-417d-aff6-a37800db6a41\") " pod="openstack/dnsmasq-dns-8c6f6df99-jzqc8" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.413862 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4mcc\" (UniqueName: \"kubernetes.io/projected/6db2b942-f99f-417d-aff6-a37800db6a41-kube-api-access-p4mcc\") pod \"dnsmasq-dns-8c6f6df99-jzqc8\" (UID: \"6db2b942-f99f-417d-aff6-a37800db6a41\") " pod="openstack/dnsmasq-dns-8c6f6df99-jzqc8" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.468508 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8c6f6df99-jzqc8" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.597449 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-twwgp" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.699527 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/07814860-7a0a-48b8-996c-3472c44897f3-ovsdbserver-sb\") pod \"07814860-7a0a-48b8-996c-3472c44897f3\" (UID: \"07814860-7a0a-48b8-996c-3472c44897f3\") " Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.699850 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07814860-7a0a-48b8-996c-3472c44897f3-config\") pod \"07814860-7a0a-48b8-996c-3472c44897f3\" (UID: \"07814860-7a0a-48b8-996c-3472c44897f3\") " Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.699896 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/07814860-7a0a-48b8-996c-3472c44897f3-ovsdbserver-nb\") pod \"07814860-7a0a-48b8-996c-3472c44897f3\" (UID: \"07814860-7a0a-48b8-996c-3472c44897f3\") " Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.699928 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/07814860-7a0a-48b8-996c-3472c44897f3-dns-svc\") pod \"07814860-7a0a-48b8-996c-3472c44897f3\" (UID: \"07814860-7a0a-48b8-996c-3472c44897f3\") " Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.700068 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5jlrz\" (UniqueName: \"kubernetes.io/projected/07814860-7a0a-48b8-996c-3472c44897f3-kube-api-access-5jlrz\") pod \"07814860-7a0a-48b8-996c-3472c44897f3\" (UID: \"07814860-7a0a-48b8-996c-3472c44897f3\") " Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.700124 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/07814860-7a0a-48b8-996c-3472c44897f3-dns-swift-storage-0\") pod \"07814860-7a0a-48b8-996c-3472c44897f3\" (UID: \"07814860-7a0a-48b8-996c-3472c44897f3\") " Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.706927 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07814860-7a0a-48b8-996c-3472c44897f3-kube-api-access-5jlrz" (OuterVolumeSpecName: "kube-api-access-5jlrz") pod "07814860-7a0a-48b8-996c-3472c44897f3" (UID: "07814860-7a0a-48b8-996c-3472c44897f3"). InnerVolumeSpecName "kube-api-access-5jlrz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.753730 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07814860-7a0a-48b8-996c-3472c44897f3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "07814860-7a0a-48b8-996c-3472c44897f3" (UID: "07814860-7a0a-48b8-996c-3472c44897f3"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.755044 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07814860-7a0a-48b8-996c-3472c44897f3-config" (OuterVolumeSpecName: "config") pod "07814860-7a0a-48b8-996c-3472c44897f3" (UID: "07814860-7a0a-48b8-996c-3472c44897f3"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.756329 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07814860-7a0a-48b8-996c-3472c44897f3-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "07814860-7a0a-48b8-996c-3472c44897f3" (UID: "07814860-7a0a-48b8-996c-3472c44897f3"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.758650 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07814860-7a0a-48b8-996c-3472c44897f3-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "07814860-7a0a-48b8-996c-3472c44897f3" (UID: "07814860-7a0a-48b8-996c-3472c44897f3"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.762222 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07814860-7a0a-48b8-996c-3472c44897f3-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "07814860-7a0a-48b8-996c-3472c44897f3" (UID: "07814860-7a0a-48b8-996c-3472c44897f3"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.776688 4651 generic.go:334] "Generic (PLEG): container finished" podID="07814860-7a0a-48b8-996c-3472c44897f3" containerID="a3f7364e84451290999c632504bf3c62cfde20321efb17b0b64c351964edbd97" exitCode=0 Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.776734 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-twwgp" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.776735 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-twwgp" event={"ID":"07814860-7a0a-48b8-996c-3472c44897f3","Type":"ContainerDied","Data":"a3f7364e84451290999c632504bf3c62cfde20321efb17b0b64c351964edbd97"} Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.777865 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-twwgp" event={"ID":"07814860-7a0a-48b8-996c-3472c44897f3","Type":"ContainerDied","Data":"1a23fde6865598f59760ca1e3ffd6785ced61c4b9c3a60b71def15fbf5dd927b"} Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.777889 4651 scope.go:117] "RemoveContainer" containerID="a3f7364e84451290999c632504bf3c62cfde20321efb17b0b64c351964edbd97" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.803714 4651 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/07814860-7a0a-48b8-996c-3472c44897f3-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.803756 4651 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/07814860-7a0a-48b8-996c-3472c44897f3-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.803769 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5jlrz\" (UniqueName: \"kubernetes.io/projected/07814860-7a0a-48b8-996c-3472c44897f3-kube-api-access-5jlrz\") on node \"crc\" DevicePath \"\"" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.803779 4651 reconciler_common.go:293] "Volume 
detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/07814860-7a0a-48b8-996c-3472c44897f3-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.803793 4651 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/07814860-7a0a-48b8-996c-3472c44897f3-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.803804 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07814860-7a0a-48b8-996c-3472c44897f3-config\") on node \"crc\" DevicePath \"\"" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.809008 4651 scope.go:117] "RemoveContainer" containerID="163f2d78f7594707b6a04858138c9ceeff8c916ecef9fd81cc8cd62a5e84ea50" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.816607 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-twwgp"] Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.824632 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-twwgp"] Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.847142 4651 scope.go:117] "RemoveContainer" containerID="a3f7364e84451290999c632504bf3c62cfde20321efb17b0b64c351964edbd97" Oct 11 05:12:04 crc kubenswrapper[4651]: E1011 05:12:04.847572 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3f7364e84451290999c632504bf3c62cfde20321efb17b0b64c351964edbd97\": container with ID starting with a3f7364e84451290999c632504bf3c62cfde20321efb17b0b64c351964edbd97 not found: ID does not exist" containerID="a3f7364e84451290999c632504bf3c62cfde20321efb17b0b64c351964edbd97" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.847606 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3f7364e84451290999c632504bf3c62cfde20321efb17b0b64c351964edbd97"} err="failed to get container status \"a3f7364e84451290999c632504bf3c62cfde20321efb17b0b64c351964edbd97\": rpc error: code = NotFound desc = could not find container \"a3f7364e84451290999c632504bf3c62cfde20321efb17b0b64c351964edbd97\": container with ID starting with a3f7364e84451290999c632504bf3c62cfde20321efb17b0b64c351964edbd97 not found: ID does not exist" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.847655 4651 scope.go:117] "RemoveContainer" containerID="163f2d78f7594707b6a04858138c9ceeff8c916ecef9fd81cc8cd62a5e84ea50" Oct 11 05:12:04 crc kubenswrapper[4651]: E1011 05:12:04.848112 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"163f2d78f7594707b6a04858138c9ceeff8c916ecef9fd81cc8cd62a5e84ea50\": container with ID starting with 163f2d78f7594707b6a04858138c9ceeff8c916ecef9fd81cc8cd62a5e84ea50 not found: ID does not exist" containerID="163f2d78f7594707b6a04858138c9ceeff8c916ecef9fd81cc8cd62a5e84ea50" Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.848185 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"163f2d78f7594707b6a04858138c9ceeff8c916ecef9fd81cc8cd62a5e84ea50"} err="failed to get container status \"163f2d78f7594707b6a04858138c9ceeff8c916ecef9fd81cc8cd62a5e84ea50\": rpc error: code = NotFound desc = could not find container \"163f2d78f7594707b6a04858138c9ceeff8c916ecef9fd81cc8cd62a5e84ea50\": 
container with ID starting with 163f2d78f7594707b6a04858138c9ceeff8c916ecef9fd81cc8cd62a5e84ea50 not found: ID does not exist"
Oct 11 05:12:04 crc kubenswrapper[4651]: I1011 05:12:04.926966 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8c6f6df99-jzqc8"]
Oct 11 05:12:04 crc kubenswrapper[4651]: W1011 05:12:04.933701 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6db2b942_f99f_417d_aff6_a37800db6a41.slice/crio-1bea28deae3a64d85a69bfc7130c6c49e21601ee837e1241fe3cd05c8a3a0023 WatchSource:0}: Error finding container 1bea28deae3a64d85a69bfc7130c6c49e21601ee837e1241fe3cd05c8a3a0023: Status 404 returned error can't find the container with id 1bea28deae3a64d85a69bfc7130c6c49e21601ee837e1241fe3cd05c8a3a0023
Oct 11 05:12:05 crc kubenswrapper[4651]: I1011 05:12:05.789622 4651 generic.go:334] "Generic (PLEG): container finished" podID="6db2b942-f99f-417d-aff6-a37800db6a41" containerID="084a1dd845d738c163fffa6ab36d10e417fd64d75261fa8d57d3e15d764317bf" exitCode=0
Oct 11 05:12:05 crc kubenswrapper[4651]: I1011 05:12:05.789740 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8c6f6df99-jzqc8" event={"ID":"6db2b942-f99f-417d-aff6-a37800db6a41","Type":"ContainerDied","Data":"084a1dd845d738c163fffa6ab36d10e417fd64d75261fa8d57d3e15d764317bf"}
Oct 11 05:12:05 crc kubenswrapper[4651]: I1011 05:12:05.790340 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8c6f6df99-jzqc8" event={"ID":"6db2b942-f99f-417d-aff6-a37800db6a41","Type":"ContainerStarted","Data":"1bea28deae3a64d85a69bfc7130c6c49e21601ee837e1241fe3cd05c8a3a0023"}
Oct 11 05:12:05 crc kubenswrapper[4651]: I1011 05:12:05.883583 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07814860-7a0a-48b8-996c-3472c44897f3" path="/var/lib/kubelet/pods/07814860-7a0a-48b8-996c-3472c44897f3/volumes"
Oct 11 05:12:06 crc kubenswrapper[4651]: I1011 05:12:06.808025 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8c6f6df99-jzqc8" event={"ID":"6db2b942-f99f-417d-aff6-a37800db6a41","Type":"ContainerStarted","Data":"65d72b1dd3b3a74cc4e20ed172ccc08cca822e48ae5571a6133c3590ad8edadd"}
Oct 11 05:12:06 crc kubenswrapper[4651]: I1011 05:12:06.808425 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8c6f6df99-jzqc8"
Oct 11 05:12:06 crc kubenswrapper[4651]: I1011 05:12:06.833352 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8c6f6df99-jzqc8" podStartSLOduration=2.83333151 podStartE2EDuration="2.83333151s" podCreationTimestamp="2025-10-11 05:12:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:12:06.832227482 +0000 UTC m=+1247.728460308" watchObservedRunningTime="2025-10-11 05:12:06.83333151 +0000 UTC m=+1247.729564336"
Oct 11 05:12:14 crc kubenswrapper[4651]: I1011 05:12:14.470571 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8c6f6df99-jzqc8"
Oct 11 05:12:14 crc kubenswrapper[4651]: I1011 05:12:14.546399 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-bxvjf"]
Oct 11 05:12:14 crc kubenswrapper[4651]: I1011 05:12:14.546779 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5576978c7c-bxvjf" podUID="f70e3461-5292-4c8e-b369-f54726da8b01" containerName="dnsmasq-dns" containerID="cri-o://35f98ba3f1b8ddcae5f617bd67eeaefb8ab9e2dc6c44e429a5a48b838643a835" gracePeriod=10
Oct 11 05:12:14 crc kubenswrapper[4651]: I1011 05:12:14.922410 4651 generic.go:334] "Generic (PLEG): container finished" podID="f70e3461-5292-4c8e-b369-f54726da8b01" containerID="35f98ba3f1b8ddcae5f617bd67eeaefb8ab9e2dc6c44e429a5a48b838643a835" exitCode=0
Oct 11 05:12:14 crc kubenswrapper[4651]: I1011 05:12:14.922663 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5576978c7c-bxvjf" event={"ID":"f70e3461-5292-4c8e-b369-f54726da8b01","Type":"ContainerDied","Data":"35f98ba3f1b8ddcae5f617bd67eeaefb8ab9e2dc6c44e429a5a48b838643a835"}
Oct 11 05:12:15 crc kubenswrapper[4651]: I1011 05:12:15.017216 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5576978c7c-bxvjf"
Oct 11 05:12:15 crc kubenswrapper[4651]: I1011 05:12:15.125100 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-dns-swift-storage-0\") pod \"f70e3461-5292-4c8e-b369-f54726da8b01\" (UID: \"f70e3461-5292-4c8e-b369-f54726da8b01\") "
Oct 11 05:12:15 crc kubenswrapper[4651]: I1011 05:12:15.125179 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9lcfj\" (UniqueName: \"kubernetes.io/projected/f70e3461-5292-4c8e-b369-f54726da8b01-kube-api-access-9lcfj\") pod \"f70e3461-5292-4c8e-b369-f54726da8b01\" (UID: \"f70e3461-5292-4c8e-b369-f54726da8b01\") "
Oct 11 05:12:15 crc kubenswrapper[4651]: I1011 05:12:15.125260 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-ovsdbserver-nb\") pod \"f70e3461-5292-4c8e-b369-f54726da8b01\" (UID: \"f70e3461-5292-4c8e-b369-f54726da8b01\") "
Oct 11 05:12:15 crc kubenswrapper[4651]: I1011 05:12:15.125310 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-openstack-edpm-ipam\") pod \"f70e3461-5292-4c8e-b369-f54726da8b01\" (UID: \"f70e3461-5292-4c8e-b369-f54726da8b01\") "
Oct 11 05:12:15 crc kubenswrapper[4651]: I1011 05:12:15.125334 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-ovsdbserver-sb\") pod \"f70e3461-5292-4c8e-b369-f54726da8b01\" (UID: \"f70e3461-5292-4c8e-b369-f54726da8b01\") "
Oct 11 05:12:15 crc kubenswrapper[4651]: I1011 05:12:15.125359 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-config\") pod \"f70e3461-5292-4c8e-b369-f54726da8b01\" (UID: \"f70e3461-5292-4c8e-b369-f54726da8b01\") "
Oct 11 05:12:15 crc kubenswrapper[4651]: I1011 05:12:15.125378 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-dns-svc\") pod \"f70e3461-5292-4c8e-b369-f54726da8b01\" (UID: \"f70e3461-5292-4c8e-b369-f54726da8b01\") "
Oct 11 05:12:15 crc kubenswrapper[4651]: I1011 05:12:15.130843 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f70e3461-5292-4c8e-b369-f54726da8b01-kube-api-access-9lcfj" (OuterVolumeSpecName: "kube-api-access-9lcfj") pod "f70e3461-5292-4c8e-b369-f54726da8b01" (UID: "f70e3461-5292-4c8e-b369-f54726da8b01"). InnerVolumeSpecName "kube-api-access-9lcfj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:12:15 crc kubenswrapper[4651]: I1011 05:12:15.196608 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-config" (OuterVolumeSpecName: "config") pod "f70e3461-5292-4c8e-b369-f54726da8b01" (UID: "f70e3461-5292-4c8e-b369-f54726da8b01"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:12:15 crc kubenswrapper[4651]: I1011 05:12:15.196774 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "f70e3461-5292-4c8e-b369-f54726da8b01" (UID: "f70e3461-5292-4c8e-b369-f54726da8b01"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:12:15 crc kubenswrapper[4651]: I1011 05:12:15.197709 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "f70e3461-5292-4c8e-b369-f54726da8b01" (UID: "f70e3461-5292-4c8e-b369-f54726da8b01"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:12:15 crc kubenswrapper[4651]: I1011 05:12:15.198307 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f70e3461-5292-4c8e-b369-f54726da8b01" (UID: "f70e3461-5292-4c8e-b369-f54726da8b01"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:12:15 crc kubenswrapper[4651]: I1011 05:12:15.199152 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f70e3461-5292-4c8e-b369-f54726da8b01" (UID: "f70e3461-5292-4c8e-b369-f54726da8b01"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:12:15 crc kubenswrapper[4651]: I1011 05:12:15.203488 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f70e3461-5292-4c8e-b369-f54726da8b01" (UID: "f70e3461-5292-4c8e-b369-f54726da8b01"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 05:12:15 crc kubenswrapper[4651]: I1011 05:12:15.227256 4651 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Oct 11 05:12:15 crc kubenswrapper[4651]: I1011 05:12:15.227289 4651 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\""
Oct 11 05:12:15 crc kubenswrapper[4651]: I1011 05:12:15.227299 4651 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Oct 11 05:12:15 crc kubenswrapper[4651]: I1011 05:12:15.227311 4651 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-config\") on node \"crc\" DevicePath \"\""
Oct 11 05:12:15 crc kubenswrapper[4651]: I1011 05:12:15.227320 4651 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 11 05:12:15 crc kubenswrapper[4651]: I1011 05:12:15.227327 4651 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f70e3461-5292-4c8e-b369-f54726da8b01-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Oct 11 05:12:15 crc kubenswrapper[4651]: I1011 05:12:15.227335 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9lcfj\" (UniqueName: \"kubernetes.io/projected/f70e3461-5292-4c8e-b369-f54726da8b01-kube-api-access-9lcfj\") on node \"crc\" DevicePath \"\""
Oct 11 05:12:15 crc kubenswrapper[4651]: I1011 05:12:15.934491 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5576978c7c-bxvjf" event={"ID":"f70e3461-5292-4c8e-b369-f54726da8b01","Type":"ContainerDied","Data":"283e910b86a4224b615c619359a4f8cf32ac7d753a98311b0fdb54264d508ed4"}
Oct 11 05:12:15 crc kubenswrapper[4651]: I1011 05:12:15.934546 4651 scope.go:117] "RemoveContainer" containerID="35f98ba3f1b8ddcae5f617bd67eeaefb8ab9e2dc6c44e429a5a48b838643a835"
Oct 11 05:12:15 crc kubenswrapper[4651]: I1011 05:12:15.934594 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5576978c7c-bxvjf"
Oct 11 05:12:15 crc kubenswrapper[4651]: I1011 05:12:15.959225 4651 scope.go:117] "RemoveContainer" containerID="4097aaeb0799c6e0e97e9efe419760acf6568de01d8b673c6baa184b1e09a886"
Oct 11 05:12:15 crc kubenswrapper[4651]: I1011 05:12:15.960521 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-bxvjf"]
Oct 11 05:12:15 crc kubenswrapper[4651]: I1011 05:12:15.971182 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-bxvjf"]
Oct 11 05:12:17 crc kubenswrapper[4651]: I1011 05:12:17.892631 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f70e3461-5292-4c8e-b369-f54726da8b01" path="/var/lib/kubelet/pods/f70e3461-5292-4c8e-b369-f54726da8b01/volumes"
Oct 11 05:12:19 crc kubenswrapper[4651]: I1011 05:12:19.939683 4651 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","burstable","poddbfeee44-d2ad-4a4b-814f-916176925aaf"] err="unable to destroy cgroup paths for cgroup [kubepods burstable poddbfeee44-d2ad-4a4b-814f-916176925aaf] : Timed out while waiting for systemd to remove kubepods-burstable-poddbfeee44_d2ad_4a4b_814f_916176925aaf.slice"
Oct 11 05:12:19 crc kubenswrapper[4651]: E1011 05:12:19.940043 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods burstable poddbfeee44-d2ad-4a4b-814f-916176925aaf] : unable to destroy cgroup paths for cgroup [kubepods burstable poddbfeee44-d2ad-4a4b-814f-916176925aaf] : Timed out while waiting for systemd to remove kubepods-burstable-poddbfeee44_d2ad_4a4b_814f_916176925aaf.slice" pod="openstack/rabbitmq-cell1-server-0" podUID="dbfeee44-d2ad-4a4b-814f-916176925aaf"
Oct 11 05:12:19 crc kubenswrapper[4651]: I1011 05:12:19.983231 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.031145 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.047903 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.054528 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 11 05:12:20 crc kubenswrapper[4651]: E1011 05:12:20.055520 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07814860-7a0a-48b8-996c-3472c44897f3" containerName="init"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.055554 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="07814860-7a0a-48b8-996c-3472c44897f3" containerName="init"
Oct 11 05:12:20 crc kubenswrapper[4651]: E1011 05:12:20.055588 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f70e3461-5292-4c8e-b369-f54726da8b01" containerName="dnsmasq-dns"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.055605 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="f70e3461-5292-4c8e-b369-f54726da8b01" containerName="dnsmasq-dns"
Oct 11 05:12:20 crc kubenswrapper[4651]: E1011 05:12:20.055631 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f70e3461-5292-4c8e-b369-f54726da8b01" containerName="init"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.055647 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="f70e3461-5292-4c8e-b369-f54726da8b01" containerName="init"
Oct 11 05:12:20 crc kubenswrapper[4651]: E1011 05:12:20.055676 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07814860-7a0a-48b8-996c-3472c44897f3" containerName="dnsmasq-dns"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.055693 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="07814860-7a0a-48b8-996c-3472c44897f3" containerName="dnsmasq-dns"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.056213 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="07814860-7a0a-48b8-996c-3472c44897f3" containerName="dnsmasq-dns"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.056287 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="f70e3461-5292-4c8e-b369-f54726da8b01" containerName="dnsmasq-dns"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.058568 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.061676 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.065032 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.066717 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.068106 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.068283 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.068348 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.068427 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-jjrl8"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.068738 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.122927 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/67140304-66cc-425b-a21c-b09bb0c83b8a-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"67140304-66cc-425b-a21c-b09bb0c83b8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.122997 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"67140304-66cc-425b-a21c-b09bb0c83b8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.123022 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lw5xl\" (UniqueName: \"kubernetes.io/projected/67140304-66cc-425b-a21c-b09bb0c83b8a-kube-api-access-lw5xl\") pod \"rabbitmq-cell1-server-0\" (UID: \"67140304-66cc-425b-a21c-b09bb0c83b8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.123057 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/67140304-66cc-425b-a21c-b09bb0c83b8a-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"67140304-66cc-425b-a21c-b09bb0c83b8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.123081 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/67140304-66cc-425b-a21c-b09bb0c83b8a-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"67140304-66cc-425b-a21c-b09bb0c83b8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.123099 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/67140304-66cc-425b-a21c-b09bb0c83b8a-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"67140304-66cc-425b-a21c-b09bb0c83b8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.123157 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/67140304-66cc-425b-a21c-b09bb0c83b8a-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"67140304-66cc-425b-a21c-b09bb0c83b8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.123217 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/67140304-66cc-425b-a21c-b09bb0c83b8a-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"67140304-66cc-425b-a21c-b09bb0c83b8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.123244 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/67140304-66cc-425b-a21c-b09bb0c83b8a-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"67140304-66cc-425b-a21c-b09bb0c83b8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.123270 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/67140304-66cc-425b-a21c-b09bb0c83b8a-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"67140304-66cc-425b-a21c-b09bb0c83b8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.123307 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/67140304-66cc-425b-a21c-b09bb0c83b8a-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"67140304-66cc-425b-a21c-b09bb0c83b8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.225360 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/67140304-66cc-425b-a21c-b09bb0c83b8a-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"67140304-66cc-425b-a21c-b09bb0c83b8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.225409 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/67140304-66cc-425b-a21c-b09bb0c83b8a-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"67140304-66cc-425b-a21c-b09bb0c83b8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.225448 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"67140304-66cc-425b-a21c-b09bb0c83b8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.225468 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lw5xl\" (UniqueName: \"kubernetes.io/projected/67140304-66cc-425b-a21c-b09bb0c83b8a-kube-api-access-lw5xl\") pod \"rabbitmq-cell1-server-0\" (UID: \"67140304-66cc-425b-a21c-b09bb0c83b8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.225503 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/67140304-66cc-425b-a21c-b09bb0c83b8a-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"67140304-66cc-425b-a21c-b09bb0c83b8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.225524 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/67140304-66cc-425b-a21c-b09bb0c83b8a-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"67140304-66cc-425b-a21c-b09bb0c83b8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.225540 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/67140304-66cc-425b-a21c-b09bb0c83b8a-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"67140304-66cc-425b-a21c-b09bb0c83b8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.225560 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/67140304-66cc-425b-a21c-b09bb0c83b8a-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"67140304-66cc-425b-a21c-b09bb0c83b8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.225616 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/67140304-66cc-425b-a21c-b09bb0c83b8a-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"67140304-66cc-425b-a21c-b09bb0c83b8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.225641 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/67140304-66cc-425b-a21c-b09bb0c83b8a-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"67140304-66cc-425b-a21c-b09bb0c83b8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.225658 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/67140304-66cc-425b-a21c-b09bb0c83b8a-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"67140304-66cc-425b-a21c-b09bb0c83b8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.225869 4651 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"67140304-66cc-425b-a21c-b09bb0c83b8a\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.226607 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/67140304-66cc-425b-a21c-b09bb0c83b8a-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"67140304-66cc-425b-a21c-b09bb0c83b8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.226630 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/67140304-66cc-425b-a21c-b09bb0c83b8a-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"67140304-66cc-425b-a21c-b09bb0c83b8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.226726 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/67140304-66cc-425b-a21c-b09bb0c83b8a-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"67140304-66cc-425b-a21c-b09bb0c83b8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.226948 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/67140304-66cc-425b-a21c-b09bb0c83b8a-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"67140304-66cc-425b-a21c-b09bb0c83b8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.227981 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/67140304-66cc-425b-a21c-b09bb0c83b8a-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"67140304-66cc-425b-a21c-b09bb0c83b8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.231481 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/67140304-66cc-425b-a21c-b09bb0c83b8a-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"67140304-66cc-425b-a21c-b09bb0c83b8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.232298 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/67140304-66cc-425b-a21c-b09bb0c83b8a-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"67140304-66cc-425b-a21c-b09bb0c83b8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.234183 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/67140304-66cc-425b-a21c-b09bb0c83b8a-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"67140304-66cc-425b-a21c-b09bb0c83b8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.240732 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/67140304-66cc-425b-a21c-b09bb0c83b8a-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"67140304-66cc-425b-a21c-b09bb0c83b8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.244493 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lw5xl\" (UniqueName: \"kubernetes.io/projected/67140304-66cc-425b-a21c-b09bb0c83b8a-kube-api-access-lw5xl\") pod \"rabbitmq-cell1-server-0\" (UID: \"67140304-66cc-425b-a21c-b09bb0c83b8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.254129 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"67140304-66cc-425b-a21c-b09bb0c83b8a\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.392067 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.881165 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 11 05:12:20 crc kubenswrapper[4651]: I1011 05:12:20.990632 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"67140304-66cc-425b-a21c-b09bb0c83b8a","Type":"ContainerStarted","Data":"f290cea4fdedbfe45737fc057826bbb754e6f1177f7ea186345b2d27f59e0899"}
Oct 11 05:12:21 crc kubenswrapper[4651]: I1011 05:12:21.882609 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dbfeee44-d2ad-4a4b-814f-916176925aaf" path="/var/lib/kubelet/pods/dbfeee44-d2ad-4a4b-814f-916176925aaf/volumes"
Oct 11 05:12:23 crc kubenswrapper[4651]: I1011 05:12:23.030961 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"67140304-66cc-425b-a21c-b09bb0c83b8a","Type":"ContainerStarted","Data":"b69af4d40b9a611911dae24a6b7a9d4fa8faf0640104d57fa77ef0b5dfae299d"}
Oct 11 05:12:23 crc kubenswrapper[4651]: I1011 05:12:23.520391 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m"]
Oct 11 05:12:23 crc kubenswrapper[4651]: I1011 05:12:23.521527 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m"
Oct 11 05:12:23 crc kubenswrapper[4651]: I1011 05:12:23.524524 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r489p"
Oct 11 05:12:23 crc kubenswrapper[4651]: I1011 05:12:23.524571 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Oct 11 05:12:23 crc kubenswrapper[4651]: I1011 05:12:23.524747 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Oct 11 05:12:23 crc kubenswrapper[4651]: I1011 05:12:23.524909 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 11 05:12:23 crc kubenswrapper[4651]: I1011 05:12:23.563060 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m"]
Oct 11 05:12:23 crc kubenswrapper[4651]: I1011 05:12:23.595467 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tmvjv\" (UniqueName: \"kubernetes.io/projected/fdd938c5-0eb6-402b-9ee3-28bd04fbd55e-kube-api-access-tmvjv\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m\" (UID: \"fdd938c5-0eb6-402b-9ee3-28bd04fbd55e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m"
Oct 11 05:12:23 crc kubenswrapper[4651]: I1011 05:12:23.595790 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fdd938c5-0eb6-402b-9ee3-28bd04fbd55e-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m\" (UID: \"fdd938c5-0eb6-402b-9ee3-28bd04fbd55e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m"
Oct 11 05:12:23 crc kubenswrapper[4651]: I1011 05:12:23.596085 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fdd938c5-0eb6-402b-9ee3-28bd04fbd55e-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m\" (UID: \"fdd938c5-0eb6-402b-9ee3-28bd04fbd55e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m"
Oct 11 05:12:23 crc kubenswrapper[4651]: I1011 05:12:23.596216 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdd938c5-0eb6-402b-9ee3-28bd04fbd55e-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m\" (UID: \"fdd938c5-0eb6-402b-9ee3-28bd04fbd55e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m"
Oct 11 05:12:23 crc kubenswrapper[4651]: I1011 05:12:23.698400 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdd938c5-0eb6-402b-9ee3-28bd04fbd55e-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m\" (UID: \"fdd938c5-0eb6-402b-9ee3-28bd04fbd55e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m"
Oct 11 05:12:23 crc kubenswrapper[4651]: I1011 05:12:23.699154 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tmvjv\" (UniqueName: \"kubernetes.io/projected/fdd938c5-0eb6-402b-9ee3-28bd04fbd55e-kube-api-access-tmvjv\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m\" (UID: \"fdd938c5-0eb6-402b-9ee3-28bd04fbd55e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m"
Oct 11 05:12:23 crc kubenswrapper[4651]: I1011 05:12:23.699490 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fdd938c5-0eb6-402b-9ee3-28bd04fbd55e-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m\" (UID: \"fdd938c5-0eb6-402b-9ee3-28bd04fbd55e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m"
Oct 11 05:12:23 crc kubenswrapper[4651]: I1011 05:12:23.699764 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fdd938c5-0eb6-402b-9ee3-28bd04fbd55e-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m\" (UID: \"fdd938c5-0eb6-402b-9ee3-28bd04fbd55e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m"
Oct 11 05:12:23 crc kubenswrapper[4651]: I1011 05:12:23.714016 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fdd938c5-0eb6-402b-9ee3-28bd04fbd55e-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m\" (UID: \"fdd938c5-0eb6-402b-9ee3-28bd04fbd55e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m"
Oct 11 05:12:23 crc kubenswrapper[4651]: I1011 05:12:23.714044 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fdd938c5-0eb6-402b-9ee3-28bd04fbd55e-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m\" (UID: \"fdd938c5-0eb6-402b-9ee3-28bd04fbd55e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m"
Oct 11 05:12:23 crc kubenswrapper[4651]: I1011 05:12:23.714391 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdd938c5-0eb6-402b-9ee3-28bd04fbd55e-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m\" (UID: \"fdd938c5-0eb6-402b-9ee3-28bd04fbd55e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m"
Oct 11 05:12:23 crc kubenswrapper[4651]: I1011 05:12:23.718739 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tmvjv\" (UniqueName: \"kubernetes.io/projected/fdd938c5-0eb6-402b-9ee3-28bd04fbd55e-kube-api-access-tmvjv\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m\" (UID: \"fdd938c5-0eb6-402b-9ee3-28bd04fbd55e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m"
Oct 11 05:12:23 crc kubenswrapper[4651]: I1011 05:12:23.843126 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m"
Oct 11 05:12:24 crc kubenswrapper[4651]: I1011 05:12:24.379357 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m"]
Oct 11 05:12:24 crc kubenswrapper[4651]: W1011 05:12:24.382989 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfdd938c5_0eb6_402b_9ee3_28bd04fbd55e.slice/crio-24ebde7ab0836780d6c66163268c01e9b7ccef919805f25baa3a4e186be04b86 WatchSource:0}: Error finding container 24ebde7ab0836780d6c66163268c01e9b7ccef919805f25baa3a4e186be04b86: Status 404 returned error can't find the container with id 24ebde7ab0836780d6c66163268c01e9b7ccef919805f25baa3a4e186be04b86
Oct 11 05:12:24 crc kubenswrapper[4651]: I1011 05:12:24.384919 4651 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 11 05:12:25 crc kubenswrapper[4651]: I1011 05:12:25.056173 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m" event={"ID":"fdd938c5-0eb6-402b-9ee3-28bd04fbd55e","Type":"ContainerStarted","Data":"24ebde7ab0836780d6c66163268c01e9b7ccef919805f25baa3a4e186be04b86"}
Oct 11 05:12:25 crc kubenswrapper[4651]: I1011 05:12:25.057734 4651 generic.go:334] "Generic (PLEG): container finished" podID="f157ed46-75e8-4f03-b4ec-1234385015bd" containerID="e8c9c7b02cc1d55fc2976f945c124ba54405fddf552bfc3cf3276b05d325b2b0" exitCode=0
Oct 11 05:12:25 crc kubenswrapper[4651]: I1011 05:12:25.057763 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"f157ed46-75e8-4f03-b4ec-1234385015bd","Type":"ContainerDied","Data":"e8c9c7b02cc1d55fc2976f945c124ba54405fddf552bfc3cf3276b05d325b2b0"}
Oct 11 05:12:26 crc kubenswrapper[4651]: I1011 05:12:26.071365 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"f157ed46-75e8-4f03-b4ec-1234385015bd","Type":"ContainerStarted","Data":"0dc6aac0efd1ecccf0690d22c0699754963c57c3f701a27b23c93d169a34ab1b"}
Oct 11 05:12:26 crc kubenswrapper[4651]: I1011 05:12:26.071626 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0"
Oct 11 05:12:26 crc kubenswrapper[4651]: I1011 05:12:26.099769 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=38.099752996 podStartE2EDuration="38.099752996s" podCreationTimestamp="2025-10-11 05:11:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:12:26.091173628 +0000 UTC m=+1266.987406434" watchObservedRunningTime="2025-10-11 05:12:26.099752996 +0000 UTC m=+1266.995985782"
Oct 11 05:12:33 crc kubenswrapper[4651]: I1011 05:12:33.134137 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m" event={"ID":"fdd938c5-0eb6-402b-9ee3-28bd04fbd55e","Type":"ContainerStarted","Data":"8770493762ff43296357c4ee62ebd33578fc65df25f2685a5368c97f588b8048"}
Oct 11 05:12:33 crc kubenswrapper[4651]: I1011 05:12:33.158165 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m" podStartSLOduration=2.251771192 podStartE2EDuration="10.158142889s" podCreationTimestamp="2025-10-11 05:12:23 +0000 UTC" firstStartedPulling="2025-10-11 05:12:24.38465103 +0000 UTC m=+1265.280883836" lastFinishedPulling="2025-10-11 05:12:32.291022727 +0000 UTC m=+1273.187255533" observedRunningTime="2025-10-11 05:12:33.152958657 +0000 UTC m=+1274.049191453" watchObservedRunningTime="2025-10-11 05:12:33.158142889 +0000 UTC m=+1274.054375685"
Oct 11 05:12:39 crc kubenswrapper[4651]: I1011 05:12:39.298125 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0"
Oct 11 05:12:45 crc kubenswrapper[4651]: I1011 05:12:45.266973 4651 generic.go:334] "Generic (PLEG): container finished" podID="fdd938c5-0eb6-402b-9ee3-28bd04fbd55e" containerID="8770493762ff43296357c4ee62ebd33578fc65df25f2685a5368c97f588b8048" exitCode=0
Oct 11 05:12:45 crc kubenswrapper[4651]: I1011 05:12:45.267061 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m" event={"ID":"fdd938c5-0eb6-402b-9ee3-28bd04fbd55e","Type":"ContainerDied","Data":"8770493762ff43296357c4ee62ebd33578fc65df25f2685a5368c97f588b8048"}
Oct 11 05:12:46 crc kubenswrapper[4651]: I1011 05:12:46.809564 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m"
Oct 11 05:12:46 crc kubenswrapper[4651]: I1011 05:12:46.884013 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tmvjv\" (UniqueName: \"kubernetes.io/projected/fdd938c5-0eb6-402b-9ee3-28bd04fbd55e-kube-api-access-tmvjv\") pod \"fdd938c5-0eb6-402b-9ee3-28bd04fbd55e\" (UID: \"fdd938c5-0eb6-402b-9ee3-28bd04fbd55e\") "
Oct 11 05:12:46 crc kubenswrapper[4651]: I1011 05:12:46.884219 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdd938c5-0eb6-402b-9ee3-28bd04fbd55e-repo-setup-combined-ca-bundle\") pod \"fdd938c5-0eb6-402b-9ee3-28bd04fbd55e\" (UID: \"fdd938c5-0eb6-402b-9ee3-28bd04fbd55e\") "
Oct 11 05:12:46 crc kubenswrapper[4651]: I1011 05:12:46.884282 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fdd938c5-0eb6-402b-9ee3-28bd04fbd55e-ssh-key\") pod \"fdd938c5-0eb6-402b-9ee3-28bd04fbd55e\" (UID: \"fdd938c5-0eb6-402b-9ee3-28bd04fbd55e\") "
Oct 11 05:12:46 crc kubenswrapper[4651]: I1011 05:12:46.884322 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fdd938c5-0eb6-402b-9ee3-28bd04fbd55e-inventory\") pod \"fdd938c5-0eb6-402b-9ee3-28bd04fbd55e\" (UID: \"fdd938c5-0eb6-402b-9ee3-28bd04fbd55e\") "
Oct 11 05:12:46 crc kubenswrapper[4651]: I1011 05:12:46.891205 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdd938c5-0eb6-402b-9ee3-28bd04fbd55e-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "fdd938c5-0eb6-402b-9ee3-28bd04fbd55e" (UID: "fdd938c5-0eb6-402b-9ee3-28bd04fbd55e"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:12:46 crc kubenswrapper[4651]: I1011 05:12:46.897686 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fdd938c5-0eb6-402b-9ee3-28bd04fbd55e-kube-api-access-tmvjv" (OuterVolumeSpecName: "kube-api-access-tmvjv") pod "fdd938c5-0eb6-402b-9ee3-28bd04fbd55e" (UID: "fdd938c5-0eb6-402b-9ee3-28bd04fbd55e"). InnerVolumeSpecName "kube-api-access-tmvjv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:12:46 crc kubenswrapper[4651]: I1011 05:12:46.912717 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdd938c5-0eb6-402b-9ee3-28bd04fbd55e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fdd938c5-0eb6-402b-9ee3-28bd04fbd55e" (UID: "fdd938c5-0eb6-402b-9ee3-28bd04fbd55e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:12:46 crc kubenswrapper[4651]: I1011 05:12:46.919218 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdd938c5-0eb6-402b-9ee3-28bd04fbd55e-inventory" (OuterVolumeSpecName: "inventory") pod "fdd938c5-0eb6-402b-9ee3-28bd04fbd55e" (UID: "fdd938c5-0eb6-402b-9ee3-28bd04fbd55e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:12:46 crc kubenswrapper[4651]: I1011 05:12:46.987251 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tmvjv\" (UniqueName: \"kubernetes.io/projected/fdd938c5-0eb6-402b-9ee3-28bd04fbd55e-kube-api-access-tmvjv\") on node \"crc\" DevicePath \"\""
Oct 11 05:12:46 crc kubenswrapper[4651]: I1011 05:12:46.987283 4651 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdd938c5-0eb6-402b-9ee3-28bd04fbd55e-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 11 05:12:46 crc kubenswrapper[4651]: I1011 05:12:46.987294 4651 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fdd938c5-0eb6-402b-9ee3-28bd04fbd55e-ssh-key\") on node \"crc\" DevicePath \"\""
Oct 11 05:12:46 crc kubenswrapper[4651]: I1011 05:12:46.987320 4651 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fdd938c5-0eb6-402b-9ee3-28bd04fbd55e-inventory\") on node \"crc\" DevicePath \"\""
Oct 11 05:12:47 crc kubenswrapper[4651]: I1011 05:12:47.289660 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m" event={"ID":"fdd938c5-0eb6-402b-9ee3-28bd04fbd55e","Type":"ContainerDied","Data":"24ebde7ab0836780d6c66163268c01e9b7ccef919805f25baa3a4e186be04b86"}
Oct 11 05:12:47 crc kubenswrapper[4651]: I1011 05:12:47.289699 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="24ebde7ab0836780d6c66163268c01e9b7ccef919805f25baa3a4e186be04b86"
Oct 11 05:12:47 crc kubenswrapper[4651]: I1011 05:12:47.289744 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m"
Oct 11 05:12:47 crc kubenswrapper[4651]: I1011 05:12:47.400643 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-kwqbl"]
Oct 11 05:12:47 crc kubenswrapper[4651]: E1011 05:12:47.401556 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdd938c5-0eb6-402b-9ee3-28bd04fbd55e" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam"
Oct 11 05:12:47 crc kubenswrapper[4651]: I1011 05:12:47.401580 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdd938c5-0eb6-402b-9ee3-28bd04fbd55e" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam"
Oct 11 05:12:47 crc kubenswrapper[4651]: I1011 05:12:47.402077 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdd938c5-0eb6-402b-9ee3-28bd04fbd55e" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam"
Oct 11 05:12:47 crc kubenswrapper[4651]: I1011 05:12:47.403159 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-kwqbl"
Oct 11 05:12:47 crc kubenswrapper[4651]: I1011 05:12:47.405196 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r489p"
Oct 11 05:12:47 crc kubenswrapper[4651]: I1011 05:12:47.407050 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Oct 11 05:12:47 crc kubenswrapper[4651]: I1011 05:12:47.407328 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 11 05:12:47 crc kubenswrapper[4651]: I1011 05:12:47.407759 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Oct 11 05:12:47 crc kubenswrapper[4651]: I1011 05:12:47.427944 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-kwqbl"]
Oct 11 05:12:47 crc kubenswrapper[4651]: I1011 05:12:47.496950 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f8169fed-dda6-4c75-8a3c-4ecd3b7e1866-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-kwqbl\" (UID: \"f8169fed-dda6-4c75-8a3c-4ecd3b7e1866\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-kwqbl"
Oct 11 05:12:47 crc kubenswrapper[4651]: I1011 05:12:47.497111 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f8169fed-dda6-4c75-8a3c-4ecd3b7e1866-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-kwqbl\" (UID: \"f8169fed-dda6-4c75-8a3c-4ecd3b7e1866\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-kwqbl"
Oct 11 05:12:47 crc kubenswrapper[4651]: I1011 05:12:47.497222 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5v5mh\" (UniqueName: \"kubernetes.io/projected/f8169fed-dda6-4c75-8a3c-4ecd3b7e1866-kube-api-access-5v5mh\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-kwqbl\" (UID: \"f8169fed-dda6-4c75-8a3c-4ecd3b7e1866\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-kwqbl"
Oct 11 05:12:47 crc kubenswrapper[4651]: I1011 05:12:47.599674 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5v5mh\" (UniqueName: \"kubernetes.io/projected/f8169fed-dda6-4c75-8a3c-4ecd3b7e1866-kube-api-access-5v5mh\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-kwqbl\" (UID: \"f8169fed-dda6-4c75-8a3c-4ecd3b7e1866\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-kwqbl"
Oct 11 05:12:47 crc kubenswrapper[4651]: I1011 05:12:47.599761 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f8169fed-dda6-4c75-8a3c-4ecd3b7e1866-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-kwqbl\" (UID: \"f8169fed-dda6-4c75-8a3c-4ecd3b7e1866\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-kwqbl"
Oct 11 05:12:47 crc kubenswrapper[4651]: I1011 05:12:47.600136 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f8169fed-dda6-4c75-8a3c-4ecd3b7e1866-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-kwqbl\" (UID: \"f8169fed-dda6-4c75-8a3c-4ecd3b7e1866\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-kwqbl"
Oct 11 05:12:47 crc kubenswrapper[4651]: I1011 05:12:47.604095 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f8169fed-dda6-4c75-8a3c-4ecd3b7e1866-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-kwqbl\" (UID: \"f8169fed-dda6-4c75-8a3c-4ecd3b7e1866\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-kwqbl"
Oct 11 05:12:47 crc kubenswrapper[4651]: I1011 05:12:47.604653 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f8169fed-dda6-4c75-8a3c-4ecd3b7e1866-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-kwqbl\" (UID: \"f8169fed-dda6-4c75-8a3c-4ecd3b7e1866\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-kwqbl"
Oct 11 05:12:47 crc kubenswrapper[4651]: I1011 05:12:47.615965 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5v5mh\" (UniqueName: \"kubernetes.io/projected/f8169fed-dda6-4c75-8a3c-4ecd3b7e1866-kube-api-access-5v5mh\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-kwqbl\" (UID: \"f8169fed-dda6-4c75-8a3c-4ecd3b7e1866\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-kwqbl"
Oct 11 05:12:47 crc kubenswrapper[4651]: I1011 05:12:47.738968 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-kwqbl"
Oct 11 05:12:48 crc kubenswrapper[4651]: I1011 05:12:48.311070 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-kwqbl"]
Oct 11 05:12:49 crc kubenswrapper[4651]: I1011 05:12:49.313559 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-kwqbl" event={"ID":"f8169fed-dda6-4c75-8a3c-4ecd3b7e1866","Type":"ContainerStarted","Data":"3e4f53597bfea634b39df64d7a54e4f9d33cc6a0f7f3c5723bdef2c74e284378"}
Oct 11 05:12:49 crc kubenswrapper[4651]: I1011 05:12:49.313886 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-kwqbl" event={"ID":"f8169fed-dda6-4c75-8a3c-4ecd3b7e1866","Type":"ContainerStarted","Data":"c6cbc21978f4d2e1a6344b4974c456763163b9a2672743a227a79b5796a365cf"}
Oct 11 05:12:49 crc kubenswrapper[4651]: I1011 05:12:49.349121 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-kwqbl" podStartSLOduration=1.928460167 podStartE2EDuration="2.349093929s" podCreationTimestamp="2025-10-11 05:12:47 +0000 UTC" firstStartedPulling="2025-10-11 05:12:48.32372172 +0000 UTC m=+1289.219954516" lastFinishedPulling="2025-10-11 05:12:48.744355472 +0000 UTC m=+1289.640588278" observedRunningTime="2025-10-11 05:12:49.328684119 +0000 UTC m=+1290.224916985" watchObservedRunningTime="2025-10-11 05:12:49.349093929 +0000 UTC m=+1290.245326755"
Oct 11 05:12:52 crc kubenswrapper[4651]: I1011 05:12:52.342038 4651 generic.go:334] "Generic (PLEG): container finished" podID="f8169fed-dda6-4c75-8a3c-4ecd3b7e1866" containerID="3e4f53597bfea634b39df64d7a54e4f9d33cc6a0f7f3c5723bdef2c74e284378" exitCode=0
Oct 11 05:12:52 crc kubenswrapper[4651]: I1011 05:12:52.342076 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-kwqbl" event={"ID":"f8169fed-dda6-4c75-8a3c-4ecd3b7e1866","Type":"ContainerDied","Data":"3e4f53597bfea634b39df64d7a54e4f9d33cc6a0f7f3c5723bdef2c74e284378"}
Oct 11 05:12:53 crc kubenswrapper[4651]: I1011 05:12:53.809645 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-kwqbl"
Oct 11 05:12:53 crc kubenswrapper[4651]: I1011 05:12:53.936555 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5v5mh\" (UniqueName: \"kubernetes.io/projected/f8169fed-dda6-4c75-8a3c-4ecd3b7e1866-kube-api-access-5v5mh\") pod \"f8169fed-dda6-4c75-8a3c-4ecd3b7e1866\" (UID: \"f8169fed-dda6-4c75-8a3c-4ecd3b7e1866\") "
Oct 11 05:12:53 crc kubenswrapper[4651]: I1011 05:12:53.936596 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f8169fed-dda6-4c75-8a3c-4ecd3b7e1866-inventory\") pod \"f8169fed-dda6-4c75-8a3c-4ecd3b7e1866\" (UID: \"f8169fed-dda6-4c75-8a3c-4ecd3b7e1866\") "
Oct 11 05:12:53 crc kubenswrapper[4651]: I1011 05:12:53.936786 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f8169fed-dda6-4c75-8a3c-4ecd3b7e1866-ssh-key\") pod \"f8169fed-dda6-4c75-8a3c-4ecd3b7e1866\" (UID: \"f8169fed-dda6-4c75-8a3c-4ecd3b7e1866\") "
Oct 11 05:12:53 crc kubenswrapper[4651]: I1011 05:12:53.943052 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8169fed-dda6-4c75-8a3c-4ecd3b7e1866-kube-api-access-5v5mh" (OuterVolumeSpecName: "kube-api-access-5v5mh") pod "f8169fed-dda6-4c75-8a3c-4ecd3b7e1866" (UID: "f8169fed-dda6-4c75-8a3c-4ecd3b7e1866"). InnerVolumeSpecName "kube-api-access-5v5mh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:12:53 crc kubenswrapper[4651]: I1011 05:12:53.962816 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8169fed-dda6-4c75-8a3c-4ecd3b7e1866-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f8169fed-dda6-4c75-8a3c-4ecd3b7e1866" (UID: "f8169fed-dda6-4c75-8a3c-4ecd3b7e1866"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:12:53 crc kubenswrapper[4651]: I1011 05:12:53.965384 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8169fed-dda6-4c75-8a3c-4ecd3b7e1866-inventory" (OuterVolumeSpecName: "inventory") pod "f8169fed-dda6-4c75-8a3c-4ecd3b7e1866" (UID: "f8169fed-dda6-4c75-8a3c-4ecd3b7e1866"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:12:54 crc kubenswrapper[4651]: I1011 05:12:54.038807 4651 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f8169fed-dda6-4c75-8a3c-4ecd3b7e1866-ssh-key\") on node \"crc\" DevicePath \"\""
Oct 11 05:12:54 crc kubenswrapper[4651]: I1011 05:12:54.038856 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5v5mh\" (UniqueName: \"kubernetes.io/projected/f8169fed-dda6-4c75-8a3c-4ecd3b7e1866-kube-api-access-5v5mh\") on node \"crc\" DevicePath \"\""
Oct 11 05:12:54 crc kubenswrapper[4651]: I1011 05:12:54.038866 4651 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f8169fed-dda6-4c75-8a3c-4ecd3b7e1866-inventory\") on node \"crc\" DevicePath \"\""
Oct 11 05:12:54 crc kubenswrapper[4651]: I1011 05:12:54.361786 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-kwqbl" event={"ID":"f8169fed-dda6-4c75-8a3c-4ecd3b7e1866","Type":"ContainerDied","Data":"c6cbc21978f4d2e1a6344b4974c456763163b9a2672743a227a79b5796a365cf"}
Oct 11 05:12:54 crc kubenswrapper[4651]: I1011 05:12:54.361849 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c6cbc21978f4d2e1a6344b4974c456763163b9a2672743a227a79b5796a365cf"
Oct 11 05:12:54 crc kubenswrapper[4651]: I1011 05:12:54.361894 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-kwqbl"
Oct 11 05:12:54 crc kubenswrapper[4651]: I1011 05:12:54.433106 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs"]
Oct 11 05:12:54 crc kubenswrapper[4651]: E1011 05:12:54.433624 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8169fed-dda6-4c75-8a3c-4ecd3b7e1866" containerName="redhat-edpm-deployment-openstack-edpm-ipam"
Oct 11 05:12:54 crc kubenswrapper[4651]: I1011 05:12:54.433656 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8169fed-dda6-4c75-8a3c-4ecd3b7e1866" containerName="redhat-edpm-deployment-openstack-edpm-ipam"
Oct 11 05:12:54 crc kubenswrapper[4651]: I1011 05:12:54.433973 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8169fed-dda6-4c75-8a3c-4ecd3b7e1866" containerName="redhat-edpm-deployment-openstack-edpm-ipam"
Oct 11 05:12:54 crc kubenswrapper[4651]: I1011 05:12:54.434802 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs"
Oct 11 05:12:54 crc kubenswrapper[4651]: I1011 05:12:54.438348 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Oct 11 05:12:54 crc kubenswrapper[4651]: I1011 05:12:54.438362 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 11 05:12:54 crc kubenswrapper[4651]: I1011 05:12:54.438490 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r489p"
Oct 11 05:12:54 crc kubenswrapper[4651]: I1011 05:12:54.439790 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Oct 11 05:12:54 crc kubenswrapper[4651]: I1011 05:12:54.461640 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs"]
Oct 11 05:12:54 crc kubenswrapper[4651]: I1011 05:12:54.547595 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/073ca1d1-d406-4d47-bfdd-1d1ccc6a0444-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs\" (UID: \"073ca1d1-d406-4d47-bfdd-1d1ccc6a0444\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs"
Oct 11 05:12:54 crc kubenswrapper[4651]: I1011 05:12:54.547867 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/073ca1d1-d406-4d47-bfdd-1d1ccc6a0444-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs\" (UID: \"073ca1d1-d406-4d47-bfdd-1d1ccc6a0444\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs"
Oct 11 05:12:54 crc kubenswrapper[4651]: I1011 05:12:54.547897 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/073ca1d1-d406-4d47-bfdd-1d1ccc6a0444-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs\" (UID: \"073ca1d1-d406-4d47-bfdd-1d1ccc6a0444\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs"
Oct 11 05:12:54 crc kubenswrapper[4651]: I1011 05:12:54.547956 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8qnr\" (UniqueName: \"kubernetes.io/projected/073ca1d1-d406-4d47-bfdd-1d1ccc6a0444-kube-api-access-s8qnr\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs\" (UID: \"073ca1d1-d406-4d47-bfdd-1d1ccc6a0444\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs"
Oct 11 05:12:54 crc kubenswrapper[4651]: I1011 05:12:54.649600 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/073ca1d1-d406-4d47-bfdd-1d1ccc6a0444-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs\" (UID: \"073ca1d1-d406-4d47-bfdd-1d1ccc6a0444\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs"
Oct 11 05:12:54 crc kubenswrapper[4651]: I1011 05:12:54.649638 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/073ca1d1-d406-4d47-bfdd-1d1ccc6a0444-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs\" (UID: \"073ca1d1-d406-4d47-bfdd-1d1ccc6a0444\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs"
Oct 11 05:12:54 crc kubenswrapper[4651]: I1011 05:12:54.649664 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/073ca1d1-d406-4d47-bfdd-1d1ccc6a0444-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs\" (UID: \"073ca1d1-d406-4d47-bfdd-1d1ccc6a0444\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs"
Oct 11 05:12:54 crc kubenswrapper[4651]: I1011 05:12:54.649711 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8qnr\" (UniqueName: \"kubernetes.io/projected/073ca1d1-d406-4d47-bfdd-1d1ccc6a0444-kube-api-access-s8qnr\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs\" (UID: \"073ca1d1-d406-4d47-bfdd-1d1ccc6a0444\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs"
Oct 11 05:12:54 crc kubenswrapper[4651]: I1011 05:12:54.655014 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/073ca1d1-d406-4d47-bfdd-1d1ccc6a0444-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs\" (UID: \"073ca1d1-d406-4d47-bfdd-1d1ccc6a0444\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs"
Oct 11 05:12:54 crc kubenswrapper[4651]: I1011 05:12:54.655797 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/073ca1d1-d406-4d47-bfdd-1d1ccc6a0444-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs\" (UID: \"073ca1d1-d406-4d47-bfdd-1d1ccc6a0444\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs"
Oct 11 05:12:54 crc kubenswrapper[4651]: I1011 05:12:54.661506 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/073ca1d1-d406-4d47-bfdd-1d1ccc6a0444-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs\" (UID: \"073ca1d1-d406-4d47-bfdd-1d1ccc6a0444\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs"
Oct 11 05:12:54 crc kubenswrapper[4651]: I1011 05:12:54.669862 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8qnr\" (UniqueName: \"kubernetes.io/projected/073ca1d1-d406-4d47-bfdd-1d1ccc6a0444-kube-api-access-s8qnr\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs\" (UID: \"073ca1d1-d406-4d47-bfdd-1d1ccc6a0444\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs"
Oct 11 05:12:54 crc kubenswrapper[4651]: I1011 05:12:54.759630 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs"
Oct 11 05:12:55 crc kubenswrapper[4651]: I1011 05:12:55.249599 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs"]
Oct 11 05:12:55 crc kubenswrapper[4651]: I1011 05:12:55.373730 4651 generic.go:334] "Generic (PLEG): container finished" podID="67140304-66cc-425b-a21c-b09bb0c83b8a" containerID="b69af4d40b9a611911dae24a6b7a9d4fa8faf0640104d57fa77ef0b5dfae299d" exitCode=0
Oct 11 05:12:55 crc kubenswrapper[4651]: I1011 05:12:55.373797 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"67140304-66cc-425b-a21c-b09bb0c83b8a","Type":"ContainerDied","Data":"b69af4d40b9a611911dae24a6b7a9d4fa8faf0640104d57fa77ef0b5dfae299d"}
Oct 11 05:12:55 crc kubenswrapper[4651]: I1011 05:12:55.375539 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs" event={"ID":"073ca1d1-d406-4d47-bfdd-1d1ccc6a0444","Type":"ContainerStarted","Data":"b27763f4bc3908fb15905c66059962f7dead115a701e81d5e6e644eda5e9bf25"}
Oct 11 05:12:56 crc kubenswrapper[4651]: I1011 05:12:56.392542 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs" event={"ID":"073ca1d1-d406-4d47-bfdd-1d1ccc6a0444","Type":"ContainerStarted","Data":"bcba071824c1fc58f59a9d110d949db9082919156ef12bbacaaef6d3b8a1a5c7"}
Oct 11 05:12:56 crc kubenswrapper[4651]: I1011 05:12:56.395588 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"67140304-66cc-425b-a21c-b09bb0c83b8a","Type":"ContainerStarted","Data":"8886b893de319713a8ad9a79b42612a5b23f724a6b44f6d2bce00daf15c1b92c"}
Oct 11 05:12:56 crc kubenswrapper[4651]: I1011 05:12:56.395910 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:12:56 crc kubenswrapper[4651]: I1011 05:12:56.422437 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs" podStartSLOduration=1.798646001 podStartE2EDuration="2.422404371s" podCreationTimestamp="2025-10-11 05:12:54 +0000 UTC" firstStartedPulling="2025-10-11 05:12:55.257931324 +0000 UTC m=+1296.154164140" lastFinishedPulling="2025-10-11 05:12:55.881689704 +0000 UTC m=+1296.777922510" observedRunningTime="2025-10-11 05:12:56.410807936 +0000 UTC m=+1297.307040762" watchObservedRunningTime="2025-10-11 05:12:56.422404371 +0000 UTC m=+1297.318637177"
Oct 11 05:12:56 crc kubenswrapper[4651]: I1011 05:12:56.437005 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.436987672 podStartE2EDuration="36.436987672s" podCreationTimestamp="2025-10-11 05:12:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:12:56.433404261 +0000 UTC m=+1297.329637087" watchObservedRunningTime="2025-10-11 05:12:56.436987672 +0000 UTC m=+1297.333220478"
Oct 11 05:13:10 crc kubenswrapper[4651]: I1011 05:13:10.398000 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0"
Oct 11 05:13:16 crc kubenswrapper[4651]: I1011 05:13:16.310361 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv
container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 05:13:16 crc kubenswrapper[4651]: I1011 05:13:16.311245 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 05:13:32 crc kubenswrapper[4651]: I1011 05:13:32.328533 4651 scope.go:117] "RemoveContainer" containerID="dba91db59b330a75a88f4bf1b3e3b3b23f41ca7ce0d75ff9776900bf33cdd2bc" Oct 11 05:13:32 crc kubenswrapper[4651]: I1011 05:13:32.377854 4651 scope.go:117] "RemoveContainer" containerID="3d1126b6ea5739b8acbd5810cfc36396dde69f981fb91d4145cf4f977590a3f5" Oct 11 05:13:46 crc kubenswrapper[4651]: I1011 05:13:46.310491 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 05:13:46 crc kubenswrapper[4651]: I1011 05:13:46.311018 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 05:14:16 crc kubenswrapper[4651]: I1011 05:14:16.310767 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 05:14:16 crc kubenswrapper[4651]: I1011 05:14:16.311499 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 05:14:16 crc kubenswrapper[4651]: I1011 05:14:16.311563 4651 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" Oct 11 05:14:16 crc kubenswrapper[4651]: I1011 05:14:16.312760 4651 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fdfc9e3f19e3d1ca9aaf28ab30da5e0edfd8b9e029feb471e05d551727bd0ad1"} pod="openshift-machine-config-operator/machine-config-daemon-78jnv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 11 05:14:16 crc kubenswrapper[4651]: I1011 05:14:16.312912 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" containerID="cri-o://fdfc9e3f19e3d1ca9aaf28ab30da5e0edfd8b9e029feb471e05d551727bd0ad1" gracePeriod=600 Oct 11 05:14:17 crc kubenswrapper[4651]: I1011 
05:14:17.278676 4651 generic.go:334] "Generic (PLEG): container finished" podID="519a1ae1-e964-48b0-8b61-835146df28c1" containerID="fdfc9e3f19e3d1ca9aaf28ab30da5e0edfd8b9e029feb471e05d551727bd0ad1" exitCode=0 Oct 11 05:14:17 crc kubenswrapper[4651]: I1011 05:14:17.278780 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" event={"ID":"519a1ae1-e964-48b0-8b61-835146df28c1","Type":"ContainerDied","Data":"fdfc9e3f19e3d1ca9aaf28ab30da5e0edfd8b9e029feb471e05d551727bd0ad1"} Oct 11 05:14:17 crc kubenswrapper[4651]: I1011 05:14:17.279279 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" event={"ID":"519a1ae1-e964-48b0-8b61-835146df28c1","Type":"ContainerStarted","Data":"9e22517d26caf83c6f2aa3910c4d04d694ebd6ca37f48f88ab2ba796ba35e6e7"} Oct 11 05:14:17 crc kubenswrapper[4651]: I1011 05:14:17.279306 4651 scope.go:117] "RemoveContainer" containerID="58a7a61e6423d5c4aad48bf422e788efe6a0897625015570766366bb08a19f53" Oct 11 05:14:32 crc kubenswrapper[4651]: I1011 05:14:32.484500 4651 scope.go:117] "RemoveContainer" containerID="9afbbc7cb4c14d6ce0164bfe604ff8f34b985ca3fbb40debe1e9f79f85e3940b" Oct 11 05:14:32 crc kubenswrapper[4651]: I1011 05:14:32.561045 4651 scope.go:117] "RemoveContainer" containerID="b060d41a176950323808f226ba4155e249c40327f219166e9b7bf88a1ffe963f" Oct 11 05:14:32 crc kubenswrapper[4651]: I1011 05:14:32.596252 4651 scope.go:117] "RemoveContainer" containerID="41f4eb004f48b1d6cabf3fe68dbf9b05944491f79be5cfb2f75ae208b8e80204" Oct 11 05:14:38 crc kubenswrapper[4651]: I1011 05:14:38.760957 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-txprc"] Oct 11 05:14:38 crc kubenswrapper[4651]: I1011 05:14:38.764284 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-txprc" Oct 11 05:14:38 crc kubenswrapper[4651]: I1011 05:14:38.786115 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-txprc"] Oct 11 05:14:38 crc kubenswrapper[4651]: I1011 05:14:38.951392 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d653176-cde7-4298-8674-11b73a452d1d-catalog-content\") pod \"redhat-marketplace-txprc\" (UID: \"5d653176-cde7-4298-8674-11b73a452d1d\") " pod="openshift-marketplace/redhat-marketplace-txprc" Oct 11 05:14:38 crc kubenswrapper[4651]: I1011 05:14:38.951564 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d653176-cde7-4298-8674-11b73a452d1d-utilities\") pod \"redhat-marketplace-txprc\" (UID: \"5d653176-cde7-4298-8674-11b73a452d1d\") " pod="openshift-marketplace/redhat-marketplace-txprc" Oct 11 05:14:38 crc kubenswrapper[4651]: I1011 05:14:38.951606 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s49ls\" (UniqueName: \"kubernetes.io/projected/5d653176-cde7-4298-8674-11b73a452d1d-kube-api-access-s49ls\") pod \"redhat-marketplace-txprc\" (UID: \"5d653176-cde7-4298-8674-11b73a452d1d\") " pod="openshift-marketplace/redhat-marketplace-txprc" Oct 11 05:14:39 crc kubenswrapper[4651]: I1011 05:14:39.053842 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d653176-cde7-4298-8674-11b73a452d1d-utilities\") pod \"redhat-marketplace-txprc\" (UID: \"5d653176-cde7-4298-8674-11b73a452d1d\") " pod="openshift-marketplace/redhat-marketplace-txprc" Oct 11 05:14:39 crc kubenswrapper[4651]: I1011 05:14:39.053935 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s49ls\" (UniqueName: \"kubernetes.io/projected/5d653176-cde7-4298-8674-11b73a452d1d-kube-api-access-s49ls\") pod \"redhat-marketplace-txprc\" (UID: \"5d653176-cde7-4298-8674-11b73a452d1d\") " pod="openshift-marketplace/redhat-marketplace-txprc" Oct 11 05:14:39 crc kubenswrapper[4651]: I1011 05:14:39.053986 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d653176-cde7-4298-8674-11b73a452d1d-catalog-content\") pod \"redhat-marketplace-txprc\" (UID: \"5d653176-cde7-4298-8674-11b73a452d1d\") " pod="openshift-marketplace/redhat-marketplace-txprc" Oct 11 05:14:39 crc kubenswrapper[4651]: I1011 05:14:39.054493 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d653176-cde7-4298-8674-11b73a452d1d-utilities\") pod \"redhat-marketplace-txprc\" (UID: \"5d653176-cde7-4298-8674-11b73a452d1d\") " pod="openshift-marketplace/redhat-marketplace-txprc" Oct 11 05:14:39 crc kubenswrapper[4651]: I1011 05:14:39.054640 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d653176-cde7-4298-8674-11b73a452d1d-catalog-content\") pod \"redhat-marketplace-txprc\" (UID: \"5d653176-cde7-4298-8674-11b73a452d1d\") " pod="openshift-marketplace/redhat-marketplace-txprc" Oct 11 05:14:39 crc kubenswrapper[4651]: I1011 05:14:39.083839 4651 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-s49ls\" (UniqueName: \"kubernetes.io/projected/5d653176-cde7-4298-8674-11b73a452d1d-kube-api-access-s49ls\") pod \"redhat-marketplace-txprc\" (UID: \"5d653176-cde7-4298-8674-11b73a452d1d\") " pod="openshift-marketplace/redhat-marketplace-txprc" Oct 11 05:14:39 crc kubenswrapper[4651]: I1011 05:14:39.114474 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-txprc" Oct 11 05:14:39 crc kubenswrapper[4651]: I1011 05:14:39.602807 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-txprc"] Oct 11 05:14:40 crc kubenswrapper[4651]: I1011 05:14:40.560479 4651 generic.go:334] "Generic (PLEG): container finished" podID="5d653176-cde7-4298-8674-11b73a452d1d" containerID="09724de15af4b15e2d11fd4f30869860344df48ba777d72b861145c1f994cce8" exitCode=0 Oct 11 05:14:40 crc kubenswrapper[4651]: I1011 05:14:40.560609 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-txprc" event={"ID":"5d653176-cde7-4298-8674-11b73a452d1d","Type":"ContainerDied","Data":"09724de15af4b15e2d11fd4f30869860344df48ba777d72b861145c1f994cce8"} Oct 11 05:14:40 crc kubenswrapper[4651]: I1011 05:14:40.560988 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-txprc" event={"ID":"5d653176-cde7-4298-8674-11b73a452d1d","Type":"ContainerStarted","Data":"13d3c4db71f4802b5b462ce4646c77b3382bba72ac688acb2c3e220df17b22bd"} Oct 11 05:14:41 crc kubenswrapper[4651]: I1011 05:14:41.571780 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-txprc" event={"ID":"5d653176-cde7-4298-8674-11b73a452d1d","Type":"ContainerStarted","Data":"7747a690d760f2c388f40da1f19a16732ed9223314392b6b175f9d47bdb5e614"} Oct 11 05:14:42 crc kubenswrapper[4651]: I1011 05:14:42.584919 4651 generic.go:334] "Generic (PLEG): container finished" podID="5d653176-cde7-4298-8674-11b73a452d1d" containerID="7747a690d760f2c388f40da1f19a16732ed9223314392b6b175f9d47bdb5e614" exitCode=0 Oct 11 05:14:42 crc kubenswrapper[4651]: I1011 05:14:42.584994 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-txprc" event={"ID":"5d653176-cde7-4298-8674-11b73a452d1d","Type":"ContainerDied","Data":"7747a690d760f2c388f40da1f19a16732ed9223314392b6b175f9d47bdb5e614"} Oct 11 05:14:43 crc kubenswrapper[4651]: I1011 05:14:43.595677 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-txprc" event={"ID":"5d653176-cde7-4298-8674-11b73a452d1d","Type":"ContainerStarted","Data":"6f85976e55c23a86729d06b80c2173cade974a9a7099d438fe2862b207b585c0"} Oct 11 05:14:43 crc kubenswrapper[4651]: I1011 05:14:43.611493 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-txprc" podStartSLOduration=3.173398529 podStartE2EDuration="5.611462662s" podCreationTimestamp="2025-10-11 05:14:38 +0000 UTC" firstStartedPulling="2025-10-11 05:14:40.567888078 +0000 UTC m=+1401.464120874" lastFinishedPulling="2025-10-11 05:14:43.005952211 +0000 UTC m=+1403.902185007" observedRunningTime="2025-10-11 05:14:43.609170854 +0000 UTC m=+1404.505403660" watchObservedRunningTime="2025-10-11 05:14:43.611462662 +0000 UTC m=+1404.507695458" Oct 11 05:14:49 crc kubenswrapper[4651]: I1011 05:14:49.114964 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-marketplace-txprc" Oct 11 05:14:49 crc kubenswrapper[4651]: I1011 05:14:49.115364 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-txprc" Oct 11 05:14:49 crc kubenswrapper[4651]: I1011 05:14:49.190989 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-txprc" Oct 11 05:14:49 crc kubenswrapper[4651]: I1011 05:14:49.709715 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-txprc" Oct 11 05:14:49 crc kubenswrapper[4651]: I1011 05:14:49.763673 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-txprc"] Oct 11 05:14:51 crc kubenswrapper[4651]: I1011 05:14:51.675772 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-txprc" podUID="5d653176-cde7-4298-8674-11b73a452d1d" containerName="registry-server" containerID="cri-o://6f85976e55c23a86729d06b80c2173cade974a9a7099d438fe2862b207b585c0" gracePeriod=2 Oct 11 05:14:52 crc kubenswrapper[4651]: I1011 05:14:52.174912 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-txprc" Oct 11 05:14:52 crc kubenswrapper[4651]: I1011 05:14:52.342400 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d653176-cde7-4298-8674-11b73a452d1d-catalog-content\") pod \"5d653176-cde7-4298-8674-11b73a452d1d\" (UID: \"5d653176-cde7-4298-8674-11b73a452d1d\") " Oct 11 05:14:52 crc kubenswrapper[4651]: I1011 05:14:52.342499 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s49ls\" (UniqueName: \"kubernetes.io/projected/5d653176-cde7-4298-8674-11b73a452d1d-kube-api-access-s49ls\") pod \"5d653176-cde7-4298-8674-11b73a452d1d\" (UID: \"5d653176-cde7-4298-8674-11b73a452d1d\") " Oct 11 05:14:52 crc kubenswrapper[4651]: I1011 05:14:52.342567 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d653176-cde7-4298-8674-11b73a452d1d-utilities\") pod \"5d653176-cde7-4298-8674-11b73a452d1d\" (UID: \"5d653176-cde7-4298-8674-11b73a452d1d\") " Oct 11 05:14:52 crc kubenswrapper[4651]: I1011 05:14:52.343870 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d653176-cde7-4298-8674-11b73a452d1d-utilities" (OuterVolumeSpecName: "utilities") pod "5d653176-cde7-4298-8674-11b73a452d1d" (UID: "5d653176-cde7-4298-8674-11b73a452d1d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:14:52 crc kubenswrapper[4651]: I1011 05:14:52.348059 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d653176-cde7-4298-8674-11b73a452d1d-kube-api-access-s49ls" (OuterVolumeSpecName: "kube-api-access-s49ls") pod "5d653176-cde7-4298-8674-11b73a452d1d" (UID: "5d653176-cde7-4298-8674-11b73a452d1d"). InnerVolumeSpecName "kube-api-access-s49ls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:14:52 crc kubenswrapper[4651]: I1011 05:14:52.354945 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d653176-cde7-4298-8674-11b73a452d1d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5d653176-cde7-4298-8674-11b73a452d1d" (UID: "5d653176-cde7-4298-8674-11b73a452d1d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:14:52 crc kubenswrapper[4651]: I1011 05:14:52.444982 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s49ls\" (UniqueName: \"kubernetes.io/projected/5d653176-cde7-4298-8674-11b73a452d1d-kube-api-access-s49ls\") on node \"crc\" DevicePath \"\"" Oct 11 05:14:52 crc kubenswrapper[4651]: I1011 05:14:52.445019 4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d653176-cde7-4298-8674-11b73a452d1d-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 05:14:52 crc kubenswrapper[4651]: I1011 05:14:52.445032 4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d653176-cde7-4298-8674-11b73a452d1d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 05:14:52 crc kubenswrapper[4651]: I1011 05:14:52.687023 4651 generic.go:334] "Generic (PLEG): container finished" podID="5d653176-cde7-4298-8674-11b73a452d1d" containerID="6f85976e55c23a86729d06b80c2173cade974a9a7099d438fe2862b207b585c0" exitCode=0 Oct 11 05:14:52 crc kubenswrapper[4651]: I1011 05:14:52.687076 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-txprc" event={"ID":"5d653176-cde7-4298-8674-11b73a452d1d","Type":"ContainerDied","Data":"6f85976e55c23a86729d06b80c2173cade974a9a7099d438fe2862b207b585c0"} Oct 11 05:14:52 crc kubenswrapper[4651]: I1011 05:14:52.687111 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-txprc" event={"ID":"5d653176-cde7-4298-8674-11b73a452d1d","Type":"ContainerDied","Data":"13d3c4db71f4802b5b462ce4646c77b3382bba72ac688acb2c3e220df17b22bd"} Oct 11 05:14:52 crc kubenswrapper[4651]: I1011 05:14:52.687137 4651 scope.go:117] "RemoveContainer" containerID="6f85976e55c23a86729d06b80c2173cade974a9a7099d438fe2862b207b585c0" Oct 11 05:14:52 crc kubenswrapper[4651]: I1011 05:14:52.687306 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-txprc" Oct 11 05:14:52 crc kubenswrapper[4651]: I1011 05:14:52.735726 4651 scope.go:117] "RemoveContainer" containerID="7747a690d760f2c388f40da1f19a16732ed9223314392b6b175f9d47bdb5e614" Oct 11 05:14:52 crc kubenswrapper[4651]: I1011 05:14:52.764936 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-txprc"] Oct 11 05:14:52 crc kubenswrapper[4651]: I1011 05:14:52.768213 4651 scope.go:117] "RemoveContainer" containerID="09724de15af4b15e2d11fd4f30869860344df48ba777d72b861145c1f994cce8" Oct 11 05:14:52 crc kubenswrapper[4651]: I1011 05:14:52.775572 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-txprc"] Oct 11 05:14:52 crc kubenswrapper[4651]: I1011 05:14:52.808528 4651 scope.go:117] "RemoveContainer" containerID="6f85976e55c23a86729d06b80c2173cade974a9a7099d438fe2862b207b585c0" Oct 11 05:14:52 crc kubenswrapper[4651]: E1011 05:14:52.809131 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f85976e55c23a86729d06b80c2173cade974a9a7099d438fe2862b207b585c0\": container with ID starting with 6f85976e55c23a86729d06b80c2173cade974a9a7099d438fe2862b207b585c0 not found: ID does not exist" containerID="6f85976e55c23a86729d06b80c2173cade974a9a7099d438fe2862b207b585c0" Oct 11 05:14:52 crc kubenswrapper[4651]: I1011 05:14:52.809175 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f85976e55c23a86729d06b80c2173cade974a9a7099d438fe2862b207b585c0"} err="failed to get container status \"6f85976e55c23a86729d06b80c2173cade974a9a7099d438fe2862b207b585c0\": rpc error: code = NotFound desc = could not find container \"6f85976e55c23a86729d06b80c2173cade974a9a7099d438fe2862b207b585c0\": container with ID starting with 6f85976e55c23a86729d06b80c2173cade974a9a7099d438fe2862b207b585c0 not found: ID does not exist" Oct 11 05:14:52 crc kubenswrapper[4651]: I1011 05:14:52.809202 4651 scope.go:117] "RemoveContainer" containerID="7747a690d760f2c388f40da1f19a16732ed9223314392b6b175f9d47bdb5e614" Oct 11 05:14:52 crc kubenswrapper[4651]: E1011 05:14:52.809523 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7747a690d760f2c388f40da1f19a16732ed9223314392b6b175f9d47bdb5e614\": container with ID starting with 7747a690d760f2c388f40da1f19a16732ed9223314392b6b175f9d47bdb5e614 not found: ID does not exist" containerID="7747a690d760f2c388f40da1f19a16732ed9223314392b6b175f9d47bdb5e614" Oct 11 05:14:52 crc kubenswrapper[4651]: I1011 05:14:52.809581 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7747a690d760f2c388f40da1f19a16732ed9223314392b6b175f9d47bdb5e614"} err="failed to get container status \"7747a690d760f2c388f40da1f19a16732ed9223314392b6b175f9d47bdb5e614\": rpc error: code = NotFound desc = could not find container \"7747a690d760f2c388f40da1f19a16732ed9223314392b6b175f9d47bdb5e614\": container with ID starting with 7747a690d760f2c388f40da1f19a16732ed9223314392b6b175f9d47bdb5e614 not found: ID does not exist" Oct 11 05:14:52 crc kubenswrapper[4651]: I1011 05:14:52.809618 4651 scope.go:117] "RemoveContainer" containerID="09724de15af4b15e2d11fd4f30869860344df48ba777d72b861145c1f994cce8" Oct 11 05:14:52 crc kubenswrapper[4651]: E1011 05:14:52.810155 4651 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"09724de15af4b15e2d11fd4f30869860344df48ba777d72b861145c1f994cce8\": container with ID starting with 09724de15af4b15e2d11fd4f30869860344df48ba777d72b861145c1f994cce8 not found: ID does not exist" containerID="09724de15af4b15e2d11fd4f30869860344df48ba777d72b861145c1f994cce8" Oct 11 05:14:52 crc kubenswrapper[4651]: I1011 05:14:52.810191 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09724de15af4b15e2d11fd4f30869860344df48ba777d72b861145c1f994cce8"} err="failed to get container status \"09724de15af4b15e2d11fd4f30869860344df48ba777d72b861145c1f994cce8\": rpc error: code = NotFound desc = could not find container \"09724de15af4b15e2d11fd4f30869860344df48ba777d72b861145c1f994cce8\": container with ID starting with 09724de15af4b15e2d11fd4f30869860344df48ba777d72b861145c1f994cce8 not found: ID does not exist" Oct 11 05:14:52 crc kubenswrapper[4651]: I1011 05:14:52.871608 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-pfgdv"] Oct 11 05:14:52 crc kubenswrapper[4651]: E1011 05:14:52.874425 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d653176-cde7-4298-8674-11b73a452d1d" containerName="registry-server" Oct 11 05:14:52 crc kubenswrapper[4651]: I1011 05:14:52.874470 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d653176-cde7-4298-8674-11b73a452d1d" containerName="registry-server" Oct 11 05:14:52 crc kubenswrapper[4651]: E1011 05:14:52.874534 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d653176-cde7-4298-8674-11b73a452d1d" containerName="extract-utilities" Oct 11 05:14:52 crc kubenswrapper[4651]: I1011 05:14:52.874544 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d653176-cde7-4298-8674-11b73a452d1d" containerName="extract-utilities" Oct 11 05:14:52 crc kubenswrapper[4651]: E1011 05:14:52.874611 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d653176-cde7-4298-8674-11b73a452d1d" containerName="extract-content" Oct 11 05:14:52 crc kubenswrapper[4651]: I1011 05:14:52.874619 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d653176-cde7-4298-8674-11b73a452d1d" containerName="extract-content" Oct 11 05:14:52 crc kubenswrapper[4651]: I1011 05:14:52.875227 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d653176-cde7-4298-8674-11b73a452d1d" containerName="registry-server" Oct 11 05:14:52 crc kubenswrapper[4651]: I1011 05:14:52.882208 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-pfgdv" Oct 11 05:14:52 crc kubenswrapper[4651]: I1011 05:14:52.888909 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pfgdv"] Oct 11 05:14:53 crc kubenswrapper[4651]: I1011 05:14:53.065101 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3dd1afe3-22e4-4818-bdfe-5f273132c530-utilities\") pod \"certified-operators-pfgdv\" (UID: \"3dd1afe3-22e4-4818-bdfe-5f273132c530\") " pod="openshift-marketplace/certified-operators-pfgdv" Oct 11 05:14:53 crc kubenswrapper[4651]: I1011 05:14:53.065425 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bg56r\" (UniqueName: \"kubernetes.io/projected/3dd1afe3-22e4-4818-bdfe-5f273132c530-kube-api-access-bg56r\") pod \"certified-operators-pfgdv\" (UID: \"3dd1afe3-22e4-4818-bdfe-5f273132c530\") " pod="openshift-marketplace/certified-operators-pfgdv" Oct 11 05:14:53 crc kubenswrapper[4651]: I1011 05:14:53.065534 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3dd1afe3-22e4-4818-bdfe-5f273132c530-catalog-content\") pod \"certified-operators-pfgdv\" (UID: \"3dd1afe3-22e4-4818-bdfe-5f273132c530\") " pod="openshift-marketplace/certified-operators-pfgdv" Oct 11 05:14:53 crc kubenswrapper[4651]: I1011 05:14:53.167666 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bg56r\" (UniqueName: \"kubernetes.io/projected/3dd1afe3-22e4-4818-bdfe-5f273132c530-kube-api-access-bg56r\") pod \"certified-operators-pfgdv\" (UID: \"3dd1afe3-22e4-4818-bdfe-5f273132c530\") " pod="openshift-marketplace/certified-operators-pfgdv" Oct 11 05:14:53 crc kubenswrapper[4651]: I1011 05:14:53.168102 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3dd1afe3-22e4-4818-bdfe-5f273132c530-catalog-content\") pod \"certified-operators-pfgdv\" (UID: \"3dd1afe3-22e4-4818-bdfe-5f273132c530\") " pod="openshift-marketplace/certified-operators-pfgdv" Oct 11 05:14:53 crc kubenswrapper[4651]: I1011 05:14:53.168530 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3dd1afe3-22e4-4818-bdfe-5f273132c530-catalog-content\") pod \"certified-operators-pfgdv\" (UID: \"3dd1afe3-22e4-4818-bdfe-5f273132c530\") " pod="openshift-marketplace/certified-operators-pfgdv" Oct 11 05:14:53 crc kubenswrapper[4651]: I1011 05:14:53.168842 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3dd1afe3-22e4-4818-bdfe-5f273132c530-utilities\") pod \"certified-operators-pfgdv\" (UID: \"3dd1afe3-22e4-4818-bdfe-5f273132c530\") " pod="openshift-marketplace/certified-operators-pfgdv" Oct 11 05:14:53 crc kubenswrapper[4651]: I1011 05:14:53.168875 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3dd1afe3-22e4-4818-bdfe-5f273132c530-utilities\") pod \"certified-operators-pfgdv\" (UID: \"3dd1afe3-22e4-4818-bdfe-5f273132c530\") " pod="openshift-marketplace/certified-operators-pfgdv" Oct 11 05:14:53 crc kubenswrapper[4651]: I1011 05:14:53.187334 4651 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-bg56r\" (UniqueName: \"kubernetes.io/projected/3dd1afe3-22e4-4818-bdfe-5f273132c530-kube-api-access-bg56r\") pod \"certified-operators-pfgdv\" (UID: \"3dd1afe3-22e4-4818-bdfe-5f273132c530\") " pod="openshift-marketplace/certified-operators-pfgdv" Oct 11 05:14:53 crc kubenswrapper[4651]: I1011 05:14:53.207247 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pfgdv" Oct 11 05:14:53 crc kubenswrapper[4651]: I1011 05:14:53.704883 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pfgdv"] Oct 11 05:14:53 crc kubenswrapper[4651]: I1011 05:14:53.890670 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d653176-cde7-4298-8674-11b73a452d1d" path="/var/lib/kubelet/pods/5d653176-cde7-4298-8674-11b73a452d1d/volumes" Oct 11 05:14:54 crc kubenswrapper[4651]: I1011 05:14:54.711884 4651 generic.go:334] "Generic (PLEG): container finished" podID="3dd1afe3-22e4-4818-bdfe-5f273132c530" containerID="0cf7295d8ddbbef778d2c91f311957bfafe775a7965132dd08130f9fed071fe0" exitCode=0 Oct 11 05:14:54 crc kubenswrapper[4651]: I1011 05:14:54.712098 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pfgdv" event={"ID":"3dd1afe3-22e4-4818-bdfe-5f273132c530","Type":"ContainerDied","Data":"0cf7295d8ddbbef778d2c91f311957bfafe775a7965132dd08130f9fed071fe0"} Oct 11 05:14:54 crc kubenswrapper[4651]: I1011 05:14:54.712749 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pfgdv" event={"ID":"3dd1afe3-22e4-4818-bdfe-5f273132c530","Type":"ContainerStarted","Data":"6efac95b0899b36a88a23c59277bc61f64749c8425dbd98072f7ad39d09096df"} Oct 11 05:14:56 crc kubenswrapper[4651]: I1011 05:14:56.734773 4651 generic.go:334] "Generic (PLEG): container finished" podID="3dd1afe3-22e4-4818-bdfe-5f273132c530" containerID="559b599a149f653bc182fc23d044f1fa1dedc1249e4f093927edaeed8f69d8f3" exitCode=0 Oct 11 05:14:56 crc kubenswrapper[4651]: I1011 05:14:56.734871 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pfgdv" event={"ID":"3dd1afe3-22e4-4818-bdfe-5f273132c530","Type":"ContainerDied","Data":"559b599a149f653bc182fc23d044f1fa1dedc1249e4f093927edaeed8f69d8f3"} Oct 11 05:14:57 crc kubenswrapper[4651]: I1011 05:14:57.752248 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pfgdv" event={"ID":"3dd1afe3-22e4-4818-bdfe-5f273132c530","Type":"ContainerStarted","Data":"a5425b0df1da2e2c1fe742c7b1a53a07e776cb5cef3b8c98b12e7356e880f404"} Oct 11 05:14:57 crc kubenswrapper[4651]: I1011 05:14:57.785701 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-pfgdv" podStartSLOduration=3.365364177 podStartE2EDuration="5.785683127s" podCreationTimestamp="2025-10-11 05:14:52 +0000 UTC" firstStartedPulling="2025-10-11 05:14:54.714079712 +0000 UTC m=+1415.610312508" lastFinishedPulling="2025-10-11 05:14:57.134398652 +0000 UTC m=+1418.030631458" observedRunningTime="2025-10-11 05:14:57.776392721 +0000 UTC m=+1418.672625527" watchObservedRunningTime="2025-10-11 05:14:57.785683127 +0000 UTC m=+1418.681915933" Oct 11 05:15:00 crc kubenswrapper[4651]: I1011 05:15:00.162643 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335995-g6dc8"] Oct 
11 05:15:00 crc kubenswrapper[4651]: I1011 05:15:00.165184 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335995-g6dc8" Oct 11 05:15:00 crc kubenswrapper[4651]: I1011 05:15:00.168851 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 11 05:15:00 crc kubenswrapper[4651]: I1011 05:15:00.169408 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 11 05:15:00 crc kubenswrapper[4651]: I1011 05:15:00.182557 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335995-g6dc8"] Oct 11 05:15:00 crc kubenswrapper[4651]: I1011 05:15:00.306332 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3aacd804-c502-494e-9d34-b2547f0abb1e-config-volume\") pod \"collect-profiles-29335995-g6dc8\" (UID: \"3aacd804-c502-494e-9d34-b2547f0abb1e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335995-g6dc8" Oct 11 05:15:00 crc kubenswrapper[4651]: I1011 05:15:00.306789 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmjz4\" (UniqueName: \"kubernetes.io/projected/3aacd804-c502-494e-9d34-b2547f0abb1e-kube-api-access-bmjz4\") pod \"collect-profiles-29335995-g6dc8\" (UID: \"3aacd804-c502-494e-9d34-b2547f0abb1e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335995-g6dc8" Oct 11 05:15:00 crc kubenswrapper[4651]: I1011 05:15:00.306935 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3aacd804-c502-494e-9d34-b2547f0abb1e-secret-volume\") pod \"collect-profiles-29335995-g6dc8\" (UID: \"3aacd804-c502-494e-9d34-b2547f0abb1e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335995-g6dc8" Oct 11 05:15:00 crc kubenswrapper[4651]: I1011 05:15:00.407959 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3aacd804-c502-494e-9d34-b2547f0abb1e-config-volume\") pod \"collect-profiles-29335995-g6dc8\" (UID: \"3aacd804-c502-494e-9d34-b2547f0abb1e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335995-g6dc8" Oct 11 05:15:00 crc kubenswrapper[4651]: I1011 05:15:00.408348 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmjz4\" (UniqueName: \"kubernetes.io/projected/3aacd804-c502-494e-9d34-b2547f0abb1e-kube-api-access-bmjz4\") pod \"collect-profiles-29335995-g6dc8\" (UID: \"3aacd804-c502-494e-9d34-b2547f0abb1e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335995-g6dc8" Oct 11 05:15:00 crc kubenswrapper[4651]: I1011 05:15:00.408495 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3aacd804-c502-494e-9d34-b2547f0abb1e-secret-volume\") pod \"collect-profiles-29335995-g6dc8\" (UID: \"3aacd804-c502-494e-9d34-b2547f0abb1e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335995-g6dc8" Oct 11 05:15:00 crc kubenswrapper[4651]: I1011 05:15:00.408980 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-volume\" (UniqueName: \"kubernetes.io/configmap/3aacd804-c502-494e-9d34-b2547f0abb1e-config-volume\") pod \"collect-profiles-29335995-g6dc8\" (UID: \"3aacd804-c502-494e-9d34-b2547f0abb1e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335995-g6dc8" Oct 11 05:15:00 crc kubenswrapper[4651]: I1011 05:15:00.423358 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3aacd804-c502-494e-9d34-b2547f0abb1e-secret-volume\") pod \"collect-profiles-29335995-g6dc8\" (UID: \"3aacd804-c502-494e-9d34-b2547f0abb1e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335995-g6dc8" Oct 11 05:15:00 crc kubenswrapper[4651]: I1011 05:15:00.427005 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmjz4\" (UniqueName: \"kubernetes.io/projected/3aacd804-c502-494e-9d34-b2547f0abb1e-kube-api-access-bmjz4\") pod \"collect-profiles-29335995-g6dc8\" (UID: \"3aacd804-c502-494e-9d34-b2547f0abb1e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335995-g6dc8" Oct 11 05:15:00 crc kubenswrapper[4651]: I1011 05:15:00.536860 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335995-g6dc8" Oct 11 05:15:00 crc kubenswrapper[4651]: I1011 05:15:00.958740 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335995-g6dc8"] Oct 11 05:15:00 crc kubenswrapper[4651]: W1011 05:15:00.962773 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3aacd804_c502_494e_9d34_b2547f0abb1e.slice/crio-f5e9b03565c1c50a64184973118d76175d862b260862fdaed9f2ab7052e00079 WatchSource:0}: Error finding container f5e9b03565c1c50a64184973118d76175d862b260862fdaed9f2ab7052e00079: Status 404 returned error can't find the container with id f5e9b03565c1c50a64184973118d76175d862b260862fdaed9f2ab7052e00079 Oct 11 05:15:01 crc kubenswrapper[4651]: I1011 05:15:01.807299 4651 generic.go:334] "Generic (PLEG): container finished" podID="3aacd804-c502-494e-9d34-b2547f0abb1e" containerID="147778ea63189bf10092acd2abf04ffad81c42a12bce3201443eba5303685e3c" exitCode=0 Oct 11 05:15:01 crc kubenswrapper[4651]: I1011 05:15:01.807415 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335995-g6dc8" event={"ID":"3aacd804-c502-494e-9d34-b2547f0abb1e","Type":"ContainerDied","Data":"147778ea63189bf10092acd2abf04ffad81c42a12bce3201443eba5303685e3c"} Oct 11 05:15:01 crc kubenswrapper[4651]: I1011 05:15:01.807542 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335995-g6dc8" event={"ID":"3aacd804-c502-494e-9d34-b2547f0abb1e","Type":"ContainerStarted","Data":"f5e9b03565c1c50a64184973118d76175d862b260862fdaed9f2ab7052e00079"} Oct 11 05:15:03 crc kubenswrapper[4651]: I1011 05:15:03.155964 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335995-g6dc8" Oct 11 05:15:03 crc kubenswrapper[4651]: I1011 05:15:03.208639 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-pfgdv" Oct 11 05:15:03 crc kubenswrapper[4651]: I1011 05:15:03.209679 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-pfgdv" Oct 11 05:15:03 crc kubenswrapper[4651]: I1011 05:15:03.252138 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-pfgdv" Oct 11 05:15:03 crc kubenswrapper[4651]: I1011 05:15:03.260566 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3aacd804-c502-494e-9d34-b2547f0abb1e-config-volume\") pod \"3aacd804-c502-494e-9d34-b2547f0abb1e\" (UID: \"3aacd804-c502-494e-9d34-b2547f0abb1e\") " Oct 11 05:15:03 crc kubenswrapper[4651]: I1011 05:15:03.260608 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3aacd804-c502-494e-9d34-b2547f0abb1e-secret-volume\") pod \"3aacd804-c502-494e-9d34-b2547f0abb1e\" (UID: \"3aacd804-c502-494e-9d34-b2547f0abb1e\") " Oct 11 05:15:03 crc kubenswrapper[4651]: I1011 05:15:03.260755 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bmjz4\" (UniqueName: \"kubernetes.io/projected/3aacd804-c502-494e-9d34-b2547f0abb1e-kube-api-access-bmjz4\") pod \"3aacd804-c502-494e-9d34-b2547f0abb1e\" (UID: \"3aacd804-c502-494e-9d34-b2547f0abb1e\") " Oct 11 05:15:03 crc kubenswrapper[4651]: I1011 05:15:03.261376 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3aacd804-c502-494e-9d34-b2547f0abb1e-config-volume" (OuterVolumeSpecName: "config-volume") pod "3aacd804-c502-494e-9d34-b2547f0abb1e" (UID: "3aacd804-c502-494e-9d34-b2547f0abb1e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:15:03 crc kubenswrapper[4651]: I1011 05:15:03.266804 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3aacd804-c502-494e-9d34-b2547f0abb1e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "3aacd804-c502-494e-9d34-b2547f0abb1e" (UID: "3aacd804-c502-494e-9d34-b2547f0abb1e"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:15:03 crc kubenswrapper[4651]: I1011 05:15:03.267621 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3aacd804-c502-494e-9d34-b2547f0abb1e-kube-api-access-bmjz4" (OuterVolumeSpecName: "kube-api-access-bmjz4") pod "3aacd804-c502-494e-9d34-b2547f0abb1e" (UID: "3aacd804-c502-494e-9d34-b2547f0abb1e"). InnerVolumeSpecName "kube-api-access-bmjz4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:15:03 crc kubenswrapper[4651]: I1011 05:15:03.363724 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bmjz4\" (UniqueName: \"kubernetes.io/projected/3aacd804-c502-494e-9d34-b2547f0abb1e-kube-api-access-bmjz4\") on node \"crc\" DevicePath \"\"" Oct 11 05:15:03 crc kubenswrapper[4651]: I1011 05:15:03.363775 4651 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3aacd804-c502-494e-9d34-b2547f0abb1e-config-volume\") on node \"crc\" DevicePath \"\"" Oct 11 05:15:03 crc kubenswrapper[4651]: I1011 05:15:03.363789 4651 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3aacd804-c502-494e-9d34-b2547f0abb1e-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 11 05:15:03 crc kubenswrapper[4651]: I1011 05:15:03.831617 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335995-g6dc8" Oct 11 05:15:03 crc kubenswrapper[4651]: I1011 05:15:03.831620 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335995-g6dc8" event={"ID":"3aacd804-c502-494e-9d34-b2547f0abb1e","Type":"ContainerDied","Data":"f5e9b03565c1c50a64184973118d76175d862b260862fdaed9f2ab7052e00079"} Oct 11 05:15:03 crc kubenswrapper[4651]: I1011 05:15:03.831999 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f5e9b03565c1c50a64184973118d76175d862b260862fdaed9f2ab7052e00079" Oct 11 05:15:03 crc kubenswrapper[4651]: I1011 05:15:03.890091 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-pfgdv" Oct 11 05:15:03 crc kubenswrapper[4651]: I1011 05:15:03.940418 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pfgdv"] Oct 11 05:15:05 crc kubenswrapper[4651]: I1011 05:15:05.850008 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-pfgdv" podUID="3dd1afe3-22e4-4818-bdfe-5f273132c530" containerName="registry-server" containerID="cri-o://a5425b0df1da2e2c1fe742c7b1a53a07e776cb5cef3b8c98b12e7356e880f404" gracePeriod=2 Oct 11 05:15:06 crc kubenswrapper[4651]: I1011 05:15:06.380031 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-pfgdv" Oct 11 05:15:06 crc kubenswrapper[4651]: I1011 05:15:06.525627 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3dd1afe3-22e4-4818-bdfe-5f273132c530-catalog-content\") pod \"3dd1afe3-22e4-4818-bdfe-5f273132c530\" (UID: \"3dd1afe3-22e4-4818-bdfe-5f273132c530\") " Oct 11 05:15:06 crc kubenswrapper[4651]: I1011 05:15:06.525721 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bg56r\" (UniqueName: \"kubernetes.io/projected/3dd1afe3-22e4-4818-bdfe-5f273132c530-kube-api-access-bg56r\") pod \"3dd1afe3-22e4-4818-bdfe-5f273132c530\" (UID: \"3dd1afe3-22e4-4818-bdfe-5f273132c530\") " Oct 11 05:15:06 crc kubenswrapper[4651]: I1011 05:15:06.525754 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3dd1afe3-22e4-4818-bdfe-5f273132c530-utilities\") pod \"3dd1afe3-22e4-4818-bdfe-5f273132c530\" (UID: \"3dd1afe3-22e4-4818-bdfe-5f273132c530\") " Oct 11 05:15:06 crc kubenswrapper[4651]: I1011 05:15:06.526635 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3dd1afe3-22e4-4818-bdfe-5f273132c530-utilities" (OuterVolumeSpecName: "utilities") pod "3dd1afe3-22e4-4818-bdfe-5f273132c530" (UID: "3dd1afe3-22e4-4818-bdfe-5f273132c530"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:15:06 crc kubenswrapper[4651]: I1011 05:15:06.531516 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3dd1afe3-22e4-4818-bdfe-5f273132c530-kube-api-access-bg56r" (OuterVolumeSpecName: "kube-api-access-bg56r") pod "3dd1afe3-22e4-4818-bdfe-5f273132c530" (UID: "3dd1afe3-22e4-4818-bdfe-5f273132c530"). InnerVolumeSpecName "kube-api-access-bg56r". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:15:06 crc kubenswrapper[4651]: I1011 05:15:06.607651 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3dd1afe3-22e4-4818-bdfe-5f273132c530-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3dd1afe3-22e4-4818-bdfe-5f273132c530" (UID: "3dd1afe3-22e4-4818-bdfe-5f273132c530"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:15:06 crc kubenswrapper[4651]: I1011 05:15:06.628283 4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3dd1afe3-22e4-4818-bdfe-5f273132c530-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 05:15:06 crc kubenswrapper[4651]: I1011 05:15:06.628330 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bg56r\" (UniqueName: \"kubernetes.io/projected/3dd1afe3-22e4-4818-bdfe-5f273132c530-kube-api-access-bg56r\") on node \"crc\" DevicePath \"\"" Oct 11 05:15:06 crc kubenswrapper[4651]: I1011 05:15:06.628346 4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3dd1afe3-22e4-4818-bdfe-5f273132c530-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 05:15:06 crc kubenswrapper[4651]: I1011 05:15:06.863125 4651 generic.go:334] "Generic (PLEG): container finished" podID="3dd1afe3-22e4-4818-bdfe-5f273132c530" containerID="a5425b0df1da2e2c1fe742c7b1a53a07e776cb5cef3b8c98b12e7356e880f404" exitCode=0 Oct 11 05:15:06 crc kubenswrapper[4651]: I1011 05:15:06.863168 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pfgdv" event={"ID":"3dd1afe3-22e4-4818-bdfe-5f273132c530","Type":"ContainerDied","Data":"a5425b0df1da2e2c1fe742c7b1a53a07e776cb5cef3b8c98b12e7356e880f404"} Oct 11 05:15:06 crc kubenswrapper[4651]: I1011 05:15:06.863204 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pfgdv" event={"ID":"3dd1afe3-22e4-4818-bdfe-5f273132c530","Type":"ContainerDied","Data":"6efac95b0899b36a88a23c59277bc61f64749c8425dbd98072f7ad39d09096df"} Oct 11 05:15:06 crc kubenswrapper[4651]: I1011 05:15:06.863202 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-pfgdv" Oct 11 05:15:06 crc kubenswrapper[4651]: I1011 05:15:06.863226 4651 scope.go:117] "RemoveContainer" containerID="a5425b0df1da2e2c1fe742c7b1a53a07e776cb5cef3b8c98b12e7356e880f404" Oct 11 05:15:06 crc kubenswrapper[4651]: I1011 05:15:06.891416 4651 scope.go:117] "RemoveContainer" containerID="559b599a149f653bc182fc23d044f1fa1dedc1249e4f093927edaeed8f69d8f3" Oct 11 05:15:06 crc kubenswrapper[4651]: I1011 05:15:06.912446 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pfgdv"] Oct 11 05:15:06 crc kubenswrapper[4651]: I1011 05:15:06.923769 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-pfgdv"] Oct 11 05:15:06 crc kubenswrapper[4651]: I1011 05:15:06.930105 4651 scope.go:117] "RemoveContainer" containerID="0cf7295d8ddbbef778d2c91f311957bfafe775a7965132dd08130f9fed071fe0" Oct 11 05:15:06 crc kubenswrapper[4651]: I1011 05:15:06.968345 4651 scope.go:117] "RemoveContainer" containerID="a5425b0df1da2e2c1fe742c7b1a53a07e776cb5cef3b8c98b12e7356e880f404" Oct 11 05:15:06 crc kubenswrapper[4651]: E1011 05:15:06.968856 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a5425b0df1da2e2c1fe742c7b1a53a07e776cb5cef3b8c98b12e7356e880f404\": container with ID starting with a5425b0df1da2e2c1fe742c7b1a53a07e776cb5cef3b8c98b12e7356e880f404 not found: ID does not exist" containerID="a5425b0df1da2e2c1fe742c7b1a53a07e776cb5cef3b8c98b12e7356e880f404" Oct 11 05:15:06 crc kubenswrapper[4651]: I1011 05:15:06.968916 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5425b0df1da2e2c1fe742c7b1a53a07e776cb5cef3b8c98b12e7356e880f404"} err="failed to get container status \"a5425b0df1da2e2c1fe742c7b1a53a07e776cb5cef3b8c98b12e7356e880f404\": rpc error: code = NotFound desc = could not find container \"a5425b0df1da2e2c1fe742c7b1a53a07e776cb5cef3b8c98b12e7356e880f404\": container with ID starting with a5425b0df1da2e2c1fe742c7b1a53a07e776cb5cef3b8c98b12e7356e880f404 not found: ID does not exist" Oct 11 05:15:06 crc kubenswrapper[4651]: I1011 05:15:06.968978 4651 scope.go:117] "RemoveContainer" containerID="559b599a149f653bc182fc23d044f1fa1dedc1249e4f093927edaeed8f69d8f3" Oct 11 05:15:06 crc kubenswrapper[4651]: E1011 05:15:06.969435 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"559b599a149f653bc182fc23d044f1fa1dedc1249e4f093927edaeed8f69d8f3\": container with ID starting with 559b599a149f653bc182fc23d044f1fa1dedc1249e4f093927edaeed8f69d8f3 not found: ID does not exist" containerID="559b599a149f653bc182fc23d044f1fa1dedc1249e4f093927edaeed8f69d8f3" Oct 11 05:15:06 crc kubenswrapper[4651]: I1011 05:15:06.969485 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"559b599a149f653bc182fc23d044f1fa1dedc1249e4f093927edaeed8f69d8f3"} err="failed to get container status \"559b599a149f653bc182fc23d044f1fa1dedc1249e4f093927edaeed8f69d8f3\": rpc error: code = NotFound desc = could not find container \"559b599a149f653bc182fc23d044f1fa1dedc1249e4f093927edaeed8f69d8f3\": container with ID starting with 559b599a149f653bc182fc23d044f1fa1dedc1249e4f093927edaeed8f69d8f3 not found: ID does not exist" Oct 11 05:15:06 crc kubenswrapper[4651]: I1011 05:15:06.969510 4651 scope.go:117] "RemoveContainer" 
containerID="0cf7295d8ddbbef778d2c91f311957bfafe775a7965132dd08130f9fed071fe0" Oct 11 05:15:06 crc kubenswrapper[4651]: E1011 05:15:06.969855 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0cf7295d8ddbbef778d2c91f311957bfafe775a7965132dd08130f9fed071fe0\": container with ID starting with 0cf7295d8ddbbef778d2c91f311957bfafe775a7965132dd08130f9fed071fe0 not found: ID does not exist" containerID="0cf7295d8ddbbef778d2c91f311957bfafe775a7965132dd08130f9fed071fe0" Oct 11 05:15:06 crc kubenswrapper[4651]: I1011 05:15:06.969896 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cf7295d8ddbbef778d2c91f311957bfafe775a7965132dd08130f9fed071fe0"} err="failed to get container status \"0cf7295d8ddbbef778d2c91f311957bfafe775a7965132dd08130f9fed071fe0\": rpc error: code = NotFound desc = could not find container \"0cf7295d8ddbbef778d2c91f311957bfafe775a7965132dd08130f9fed071fe0\": container with ID starting with 0cf7295d8ddbbef778d2c91f311957bfafe775a7965132dd08130f9fed071fe0 not found: ID does not exist" Oct 11 05:15:07 crc kubenswrapper[4651]: I1011 05:15:07.891317 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3dd1afe3-22e4-4818-bdfe-5f273132c530" path="/var/lib/kubelet/pods/3dd1afe3-22e4-4818-bdfe-5f273132c530/volumes" Oct 11 05:16:04 crc kubenswrapper[4651]: I1011 05:16:04.473622 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fkjxc"] Oct 11 05:16:04 crc kubenswrapper[4651]: E1011 05:16:04.474855 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dd1afe3-22e4-4818-bdfe-5f273132c530" containerName="registry-server" Oct 11 05:16:04 crc kubenswrapper[4651]: I1011 05:16:04.474878 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dd1afe3-22e4-4818-bdfe-5f273132c530" containerName="registry-server" Oct 11 05:16:04 crc kubenswrapper[4651]: E1011 05:16:04.474904 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dd1afe3-22e4-4818-bdfe-5f273132c530" containerName="extract-utilities" Oct 11 05:16:04 crc kubenswrapper[4651]: I1011 05:16:04.474915 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dd1afe3-22e4-4818-bdfe-5f273132c530" containerName="extract-utilities" Oct 11 05:16:04 crc kubenswrapper[4651]: E1011 05:16:04.474945 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3aacd804-c502-494e-9d34-b2547f0abb1e" containerName="collect-profiles" Oct 11 05:16:04 crc kubenswrapper[4651]: I1011 05:16:04.474958 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="3aacd804-c502-494e-9d34-b2547f0abb1e" containerName="collect-profiles" Oct 11 05:16:04 crc kubenswrapper[4651]: E1011 05:16:04.474978 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dd1afe3-22e4-4818-bdfe-5f273132c530" containerName="extract-content" Oct 11 05:16:04 crc kubenswrapper[4651]: I1011 05:16:04.474990 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dd1afe3-22e4-4818-bdfe-5f273132c530" containerName="extract-content" Oct 11 05:16:04 crc kubenswrapper[4651]: I1011 05:16:04.475305 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="3dd1afe3-22e4-4818-bdfe-5f273132c530" containerName="registry-server" Oct 11 05:16:04 crc kubenswrapper[4651]: I1011 05:16:04.476009 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="3aacd804-c502-494e-9d34-b2547f0abb1e" 
containerName="collect-profiles" Oct 11 05:16:04 crc kubenswrapper[4651]: I1011 05:16:04.478215 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fkjxc" Oct 11 05:16:04 crc kubenswrapper[4651]: I1011 05:16:04.505412 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fkjxc"] Oct 11 05:16:04 crc kubenswrapper[4651]: I1011 05:16:04.635083 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/670e1409-41ba-449a-a5ad-d7a345277a30-utilities\") pod \"community-operators-fkjxc\" (UID: \"670e1409-41ba-449a-a5ad-d7a345277a30\") " pod="openshift-marketplace/community-operators-fkjxc" Oct 11 05:16:04 crc kubenswrapper[4651]: I1011 05:16:04.635362 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/670e1409-41ba-449a-a5ad-d7a345277a30-catalog-content\") pod \"community-operators-fkjxc\" (UID: \"670e1409-41ba-449a-a5ad-d7a345277a30\") " pod="openshift-marketplace/community-operators-fkjxc" Oct 11 05:16:04 crc kubenswrapper[4651]: I1011 05:16:04.635491 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9k6vk\" (UniqueName: \"kubernetes.io/projected/670e1409-41ba-449a-a5ad-d7a345277a30-kube-api-access-9k6vk\") pod \"community-operators-fkjxc\" (UID: \"670e1409-41ba-449a-a5ad-d7a345277a30\") " pod="openshift-marketplace/community-operators-fkjxc" Oct 11 05:16:04 crc kubenswrapper[4651]: I1011 05:16:04.737217 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/670e1409-41ba-449a-a5ad-d7a345277a30-utilities\") pod \"community-operators-fkjxc\" (UID: \"670e1409-41ba-449a-a5ad-d7a345277a30\") " pod="openshift-marketplace/community-operators-fkjxc" Oct 11 05:16:04 crc kubenswrapper[4651]: I1011 05:16:04.737517 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/670e1409-41ba-449a-a5ad-d7a345277a30-catalog-content\") pod \"community-operators-fkjxc\" (UID: \"670e1409-41ba-449a-a5ad-d7a345277a30\") " pod="openshift-marketplace/community-operators-fkjxc" Oct 11 05:16:04 crc kubenswrapper[4651]: I1011 05:16:04.737666 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9k6vk\" (UniqueName: \"kubernetes.io/projected/670e1409-41ba-449a-a5ad-d7a345277a30-kube-api-access-9k6vk\") pod \"community-operators-fkjxc\" (UID: \"670e1409-41ba-449a-a5ad-d7a345277a30\") " pod="openshift-marketplace/community-operators-fkjxc" Oct 11 05:16:04 crc kubenswrapper[4651]: I1011 05:16:04.738131 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/670e1409-41ba-449a-a5ad-d7a345277a30-catalog-content\") pod \"community-operators-fkjxc\" (UID: \"670e1409-41ba-449a-a5ad-d7a345277a30\") " pod="openshift-marketplace/community-operators-fkjxc" Oct 11 05:16:04 crc kubenswrapper[4651]: I1011 05:16:04.738213 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/670e1409-41ba-449a-a5ad-d7a345277a30-utilities\") pod \"community-operators-fkjxc\" (UID: \"670e1409-41ba-449a-a5ad-d7a345277a30\") " 
pod="openshift-marketplace/community-operators-fkjxc" Oct 11 05:16:04 crc kubenswrapper[4651]: I1011 05:16:04.756079 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9k6vk\" (UniqueName: \"kubernetes.io/projected/670e1409-41ba-449a-a5ad-d7a345277a30-kube-api-access-9k6vk\") pod \"community-operators-fkjxc\" (UID: \"670e1409-41ba-449a-a5ad-d7a345277a30\") " pod="openshift-marketplace/community-operators-fkjxc" Oct 11 05:16:04 crc kubenswrapper[4651]: I1011 05:16:04.812734 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fkjxc" Oct 11 05:16:05 crc kubenswrapper[4651]: I1011 05:16:05.321046 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fkjxc"] Oct 11 05:16:05 crc kubenswrapper[4651]: I1011 05:16:05.511945 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fkjxc" event={"ID":"670e1409-41ba-449a-a5ad-d7a345277a30","Type":"ContainerStarted","Data":"2f03c565834096dbe54d14e394cd06d21681dd929c2cb2e186c164aa2429ebd8"} Oct 11 05:16:06 crc kubenswrapper[4651]: I1011 05:16:06.524465 4651 generic.go:334] "Generic (PLEG): container finished" podID="073ca1d1-d406-4d47-bfdd-1d1ccc6a0444" containerID="bcba071824c1fc58f59a9d110d949db9082919156ef12bbacaaef6d3b8a1a5c7" exitCode=0 Oct 11 05:16:06 crc kubenswrapper[4651]: I1011 05:16:06.524536 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs" event={"ID":"073ca1d1-d406-4d47-bfdd-1d1ccc6a0444","Type":"ContainerDied","Data":"bcba071824c1fc58f59a9d110d949db9082919156ef12bbacaaef6d3b8a1a5c7"} Oct 11 05:16:06 crc kubenswrapper[4651]: I1011 05:16:06.526981 4651 generic.go:334] "Generic (PLEG): container finished" podID="670e1409-41ba-449a-a5ad-d7a345277a30" containerID="7317cc34ff2e9c1aa81b8f666f2f5dfec284cf07fd51d8b31250597b5807d7ea" exitCode=0 Oct 11 05:16:06 crc kubenswrapper[4651]: I1011 05:16:06.527025 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fkjxc" event={"ID":"670e1409-41ba-449a-a5ad-d7a345277a30","Type":"ContainerDied","Data":"7317cc34ff2e9c1aa81b8f666f2f5dfec284cf07fd51d8b31250597b5807d7ea"} Oct 11 05:16:07 crc kubenswrapper[4651]: I1011 05:16:07.538092 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fkjxc" event={"ID":"670e1409-41ba-449a-a5ad-d7a345277a30","Type":"ContainerStarted","Data":"65afd761a4f73526d149ccc82f1180abe64846dcfce68d786cb4a93f12ba5250"} Oct 11 05:16:07 crc kubenswrapper[4651]: I1011 05:16:07.932127 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs" Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.107520 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/073ca1d1-d406-4d47-bfdd-1d1ccc6a0444-ssh-key\") pod \"073ca1d1-d406-4d47-bfdd-1d1ccc6a0444\" (UID: \"073ca1d1-d406-4d47-bfdd-1d1ccc6a0444\") " Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.107969 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s8qnr\" (UniqueName: \"kubernetes.io/projected/073ca1d1-d406-4d47-bfdd-1d1ccc6a0444-kube-api-access-s8qnr\") pod \"073ca1d1-d406-4d47-bfdd-1d1ccc6a0444\" (UID: \"073ca1d1-d406-4d47-bfdd-1d1ccc6a0444\") " Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.108168 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/073ca1d1-d406-4d47-bfdd-1d1ccc6a0444-inventory\") pod \"073ca1d1-d406-4d47-bfdd-1d1ccc6a0444\" (UID: \"073ca1d1-d406-4d47-bfdd-1d1ccc6a0444\") " Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.108231 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/073ca1d1-d406-4d47-bfdd-1d1ccc6a0444-bootstrap-combined-ca-bundle\") pod \"073ca1d1-d406-4d47-bfdd-1d1ccc6a0444\" (UID: \"073ca1d1-d406-4d47-bfdd-1d1ccc6a0444\") " Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.115595 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/073ca1d1-d406-4d47-bfdd-1d1ccc6a0444-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "073ca1d1-d406-4d47-bfdd-1d1ccc6a0444" (UID: "073ca1d1-d406-4d47-bfdd-1d1ccc6a0444"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.116238 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/073ca1d1-d406-4d47-bfdd-1d1ccc6a0444-kube-api-access-s8qnr" (OuterVolumeSpecName: "kube-api-access-s8qnr") pod "073ca1d1-d406-4d47-bfdd-1d1ccc6a0444" (UID: "073ca1d1-d406-4d47-bfdd-1d1ccc6a0444"). InnerVolumeSpecName "kube-api-access-s8qnr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.134069 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/073ca1d1-d406-4d47-bfdd-1d1ccc6a0444-inventory" (OuterVolumeSpecName: "inventory") pod "073ca1d1-d406-4d47-bfdd-1d1ccc6a0444" (UID: "073ca1d1-d406-4d47-bfdd-1d1ccc6a0444"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.134403 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/073ca1d1-d406-4d47-bfdd-1d1ccc6a0444-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "073ca1d1-d406-4d47-bfdd-1d1ccc6a0444" (UID: "073ca1d1-d406-4d47-bfdd-1d1ccc6a0444"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.211648 4651 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/073ca1d1-d406-4d47-bfdd-1d1ccc6a0444-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.211706 4651 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/073ca1d1-d406-4d47-bfdd-1d1ccc6a0444-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.211727 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s8qnr\" (UniqueName: \"kubernetes.io/projected/073ca1d1-d406-4d47-bfdd-1d1ccc6a0444-kube-api-access-s8qnr\") on node \"crc\" DevicePath \"\"" Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.211746 4651 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/073ca1d1-d406-4d47-bfdd-1d1ccc6a0444-inventory\") on node \"crc\" DevicePath \"\"" Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.552757 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs" event={"ID":"073ca1d1-d406-4d47-bfdd-1d1ccc6a0444","Type":"ContainerDied","Data":"b27763f4bc3908fb15905c66059962f7dead115a701e81d5e6e644eda5e9bf25"} Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.553384 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b27763f4bc3908fb15905c66059962f7dead115a701e81d5e6e644eda5e9bf25" Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.552797 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs" Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.555861 4651 generic.go:334] "Generic (PLEG): container finished" podID="670e1409-41ba-449a-a5ad-d7a345277a30" containerID="65afd761a4f73526d149ccc82f1180abe64846dcfce68d786cb4a93f12ba5250" exitCode=0 Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.555902 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fkjxc" event={"ID":"670e1409-41ba-449a-a5ad-d7a345277a30","Type":"ContainerDied","Data":"65afd761a4f73526d149ccc82f1180abe64846dcfce68d786cb4a93f12ba5250"} Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.667803 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xqtc2"] Oct 11 05:16:08 crc kubenswrapper[4651]: E1011 05:16:08.668655 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="073ca1d1-d406-4d47-bfdd-1d1ccc6a0444" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.668696 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="073ca1d1-d406-4d47-bfdd-1d1ccc6a0444" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.669186 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="073ca1d1-d406-4d47-bfdd-1d1ccc6a0444" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.670235 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xqtc2" Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.673066 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r489p" Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.673066 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.673307 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.673744 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.721100 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xqtc2"] Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.823521 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p87hp\" (UniqueName: \"kubernetes.io/projected/0428933e-bd0d-4be4-94a6-25caf11d1f23-kube-api-access-p87hp\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-xqtc2\" (UID: \"0428933e-bd0d-4be4-94a6-25caf11d1f23\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xqtc2" Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.824272 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0428933e-bd0d-4be4-94a6-25caf11d1f23-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-xqtc2\" (UID: \"0428933e-bd0d-4be4-94a6-25caf11d1f23\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xqtc2" Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.824326 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0428933e-bd0d-4be4-94a6-25caf11d1f23-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-xqtc2\" (UID: \"0428933e-bd0d-4be4-94a6-25caf11d1f23\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xqtc2" Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.927322 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p87hp\" (UniqueName: \"kubernetes.io/projected/0428933e-bd0d-4be4-94a6-25caf11d1f23-kube-api-access-p87hp\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-xqtc2\" (UID: \"0428933e-bd0d-4be4-94a6-25caf11d1f23\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xqtc2" Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.927585 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0428933e-bd0d-4be4-94a6-25caf11d1f23-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-xqtc2\" (UID: \"0428933e-bd0d-4be4-94a6-25caf11d1f23\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xqtc2" Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.928710 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0428933e-bd0d-4be4-94a6-25caf11d1f23-ssh-key\") pod 
\"download-cache-edpm-deployment-openstack-edpm-ipam-xqtc2\" (UID: \"0428933e-bd0d-4be4-94a6-25caf11d1f23\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xqtc2" Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.934380 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0428933e-bd0d-4be4-94a6-25caf11d1f23-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-xqtc2\" (UID: \"0428933e-bd0d-4be4-94a6-25caf11d1f23\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xqtc2" Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.937537 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0428933e-bd0d-4be4-94a6-25caf11d1f23-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-xqtc2\" (UID: \"0428933e-bd0d-4be4-94a6-25caf11d1f23\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xqtc2" Oct 11 05:16:08 crc kubenswrapper[4651]: I1011 05:16:08.946087 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p87hp\" (UniqueName: \"kubernetes.io/projected/0428933e-bd0d-4be4-94a6-25caf11d1f23-kube-api-access-p87hp\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-xqtc2\" (UID: \"0428933e-bd0d-4be4-94a6-25caf11d1f23\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xqtc2" Oct 11 05:16:09 crc kubenswrapper[4651]: I1011 05:16:09.007376 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xqtc2" Oct 11 05:16:09 crc kubenswrapper[4651]: I1011 05:16:09.567667 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fkjxc" event={"ID":"670e1409-41ba-449a-a5ad-d7a345277a30","Type":"ContainerStarted","Data":"0245b820b058a84e4c14e2396491d77b4d18da1d9e8f5e744ab3407f7edc87df"} Oct 11 05:16:09 crc kubenswrapper[4651]: I1011 05:16:09.608429 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xqtc2"] Oct 11 05:16:09 crc kubenswrapper[4651]: I1011 05:16:09.614451 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fkjxc" podStartSLOduration=3.195599731 podStartE2EDuration="5.614425394s" podCreationTimestamp="2025-10-11 05:16:04 +0000 UTC" firstStartedPulling="2025-10-11 05:16:06.529398315 +0000 UTC m=+1487.425631111" lastFinishedPulling="2025-10-11 05:16:08.948223978 +0000 UTC m=+1489.844456774" observedRunningTime="2025-10-11 05:16:09.597038413 +0000 UTC m=+1490.493271219" watchObservedRunningTime="2025-10-11 05:16:09.614425394 +0000 UTC m=+1490.510658200" Oct 11 05:16:10 crc kubenswrapper[4651]: I1011 05:16:10.586098 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xqtc2" event={"ID":"0428933e-bd0d-4be4-94a6-25caf11d1f23","Type":"ContainerStarted","Data":"6e82b0f94dda821748307873615fb9beff0fc0ee3ba6c7cbc0a57286fb950362"} Oct 11 05:16:10 crc kubenswrapper[4651]: I1011 05:16:10.586798 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xqtc2" event={"ID":"0428933e-bd0d-4be4-94a6-25caf11d1f23","Type":"ContainerStarted","Data":"fba12f3a4bf92c2af7924934509bde51d39b28720844cd2a7a442529202c2b6b"} Oct 11 05:16:10 crc 
kubenswrapper[4651]: I1011 05:16:10.611421 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xqtc2" podStartSLOduration=1.961299013 podStartE2EDuration="2.611392061s" podCreationTimestamp="2025-10-11 05:16:08 +0000 UTC" firstStartedPulling="2025-10-11 05:16:09.608201719 +0000 UTC m=+1490.504434515" lastFinishedPulling="2025-10-11 05:16:10.258294727 +0000 UTC m=+1491.154527563" observedRunningTime="2025-10-11 05:16:10.601309533 +0000 UTC m=+1491.497542339" watchObservedRunningTime="2025-10-11 05:16:10.611392061 +0000 UTC m=+1491.507624867" Oct 11 05:16:14 crc kubenswrapper[4651]: I1011 05:16:14.813661 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-fkjxc" Oct 11 05:16:14 crc kubenswrapper[4651]: I1011 05:16:14.814273 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fkjxc" Oct 11 05:16:14 crc kubenswrapper[4651]: I1011 05:16:14.893101 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-fkjxc" Oct 11 05:16:15 crc kubenswrapper[4651]: I1011 05:16:15.687290 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fkjxc" Oct 11 05:16:15 crc kubenswrapper[4651]: I1011 05:16:15.769551 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fkjxc"] Oct 11 05:16:16 crc kubenswrapper[4651]: I1011 05:16:16.309904 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 05:16:16 crc kubenswrapper[4651]: I1011 05:16:16.309972 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 05:16:17 crc kubenswrapper[4651]: I1011 05:16:17.647141 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-fkjxc" podUID="670e1409-41ba-449a-a5ad-d7a345277a30" containerName="registry-server" containerID="cri-o://0245b820b058a84e4c14e2396491d77b4d18da1d9e8f5e744ab3407f7edc87df" gracePeriod=2 Oct 11 05:16:18 crc kubenswrapper[4651]: I1011 05:16:18.136772 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fkjxc" Oct 11 05:16:18 crc kubenswrapper[4651]: I1011 05:16:18.318970 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/670e1409-41ba-449a-a5ad-d7a345277a30-utilities\") pod \"670e1409-41ba-449a-a5ad-d7a345277a30\" (UID: \"670e1409-41ba-449a-a5ad-d7a345277a30\") " Oct 11 05:16:18 crc kubenswrapper[4651]: I1011 05:16:18.319083 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/670e1409-41ba-449a-a5ad-d7a345277a30-catalog-content\") pod \"670e1409-41ba-449a-a5ad-d7a345277a30\" (UID: \"670e1409-41ba-449a-a5ad-d7a345277a30\") " Oct 11 05:16:18 crc kubenswrapper[4651]: I1011 05:16:18.319157 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9k6vk\" (UniqueName: \"kubernetes.io/projected/670e1409-41ba-449a-a5ad-d7a345277a30-kube-api-access-9k6vk\") pod \"670e1409-41ba-449a-a5ad-d7a345277a30\" (UID: \"670e1409-41ba-449a-a5ad-d7a345277a30\") " Oct 11 05:16:18 crc kubenswrapper[4651]: I1011 05:16:18.319840 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/670e1409-41ba-449a-a5ad-d7a345277a30-utilities" (OuterVolumeSpecName: "utilities") pod "670e1409-41ba-449a-a5ad-d7a345277a30" (UID: "670e1409-41ba-449a-a5ad-d7a345277a30"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:16:18 crc kubenswrapper[4651]: I1011 05:16:18.321162 4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/670e1409-41ba-449a-a5ad-d7a345277a30-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 05:16:18 crc kubenswrapper[4651]: I1011 05:16:18.327250 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/670e1409-41ba-449a-a5ad-d7a345277a30-kube-api-access-9k6vk" (OuterVolumeSpecName: "kube-api-access-9k6vk") pod "670e1409-41ba-449a-a5ad-d7a345277a30" (UID: "670e1409-41ba-449a-a5ad-d7a345277a30"). InnerVolumeSpecName "kube-api-access-9k6vk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:16:18 crc kubenswrapper[4651]: I1011 05:16:18.366101 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/670e1409-41ba-449a-a5ad-d7a345277a30-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "670e1409-41ba-449a-a5ad-d7a345277a30" (UID: "670e1409-41ba-449a-a5ad-d7a345277a30"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:16:18 crc kubenswrapper[4651]: I1011 05:16:18.423049 4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/670e1409-41ba-449a-a5ad-d7a345277a30-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 05:16:18 crc kubenswrapper[4651]: I1011 05:16:18.423085 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9k6vk\" (UniqueName: \"kubernetes.io/projected/670e1409-41ba-449a-a5ad-d7a345277a30-kube-api-access-9k6vk\") on node \"crc\" DevicePath \"\"" Oct 11 05:16:18 crc kubenswrapper[4651]: I1011 05:16:18.659681 4651 generic.go:334] "Generic (PLEG): container finished" podID="670e1409-41ba-449a-a5ad-d7a345277a30" containerID="0245b820b058a84e4c14e2396491d77b4d18da1d9e8f5e744ab3407f7edc87df" exitCode=0 Oct 11 05:16:18 crc kubenswrapper[4651]: I1011 05:16:18.659757 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fkjxc" Oct 11 05:16:18 crc kubenswrapper[4651]: I1011 05:16:18.659772 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fkjxc" event={"ID":"670e1409-41ba-449a-a5ad-d7a345277a30","Type":"ContainerDied","Data":"0245b820b058a84e4c14e2396491d77b4d18da1d9e8f5e744ab3407f7edc87df"} Oct 11 05:16:18 crc kubenswrapper[4651]: I1011 05:16:18.660239 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fkjxc" event={"ID":"670e1409-41ba-449a-a5ad-d7a345277a30","Type":"ContainerDied","Data":"2f03c565834096dbe54d14e394cd06d21681dd929c2cb2e186c164aa2429ebd8"} Oct 11 05:16:18 crc kubenswrapper[4651]: I1011 05:16:18.660270 4651 scope.go:117] "RemoveContainer" containerID="0245b820b058a84e4c14e2396491d77b4d18da1d9e8f5e744ab3407f7edc87df" Oct 11 05:16:18 crc kubenswrapper[4651]: I1011 05:16:18.699440 4651 scope.go:117] "RemoveContainer" containerID="65afd761a4f73526d149ccc82f1180abe64846dcfce68d786cb4a93f12ba5250" Oct 11 05:16:18 crc kubenswrapper[4651]: I1011 05:16:18.702392 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fkjxc"] Oct 11 05:16:18 crc kubenswrapper[4651]: I1011 05:16:18.717119 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-fkjxc"] Oct 11 05:16:18 crc kubenswrapper[4651]: I1011 05:16:18.744378 4651 scope.go:117] "RemoveContainer" containerID="7317cc34ff2e9c1aa81b8f666f2f5dfec284cf07fd51d8b31250597b5807d7ea" Oct 11 05:16:18 crc kubenswrapper[4651]: I1011 05:16:18.764982 4651 scope.go:117] "RemoveContainer" containerID="0245b820b058a84e4c14e2396491d77b4d18da1d9e8f5e744ab3407f7edc87df" Oct 11 05:16:18 crc kubenswrapper[4651]: E1011 05:16:18.765412 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0245b820b058a84e4c14e2396491d77b4d18da1d9e8f5e744ab3407f7edc87df\": container with ID starting with 0245b820b058a84e4c14e2396491d77b4d18da1d9e8f5e744ab3407f7edc87df not found: ID does not exist" containerID="0245b820b058a84e4c14e2396491d77b4d18da1d9e8f5e744ab3407f7edc87df" Oct 11 05:16:18 crc kubenswrapper[4651]: I1011 05:16:18.765445 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0245b820b058a84e4c14e2396491d77b4d18da1d9e8f5e744ab3407f7edc87df"} err="failed to get container status 
\"0245b820b058a84e4c14e2396491d77b4d18da1d9e8f5e744ab3407f7edc87df\": rpc error: code = NotFound desc = could not find container \"0245b820b058a84e4c14e2396491d77b4d18da1d9e8f5e744ab3407f7edc87df\": container with ID starting with 0245b820b058a84e4c14e2396491d77b4d18da1d9e8f5e744ab3407f7edc87df not found: ID does not exist" Oct 11 05:16:18 crc kubenswrapper[4651]: I1011 05:16:18.765466 4651 scope.go:117] "RemoveContainer" containerID="65afd761a4f73526d149ccc82f1180abe64846dcfce68d786cb4a93f12ba5250" Oct 11 05:16:18 crc kubenswrapper[4651]: E1011 05:16:18.765772 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65afd761a4f73526d149ccc82f1180abe64846dcfce68d786cb4a93f12ba5250\": container with ID starting with 65afd761a4f73526d149ccc82f1180abe64846dcfce68d786cb4a93f12ba5250 not found: ID does not exist" containerID="65afd761a4f73526d149ccc82f1180abe64846dcfce68d786cb4a93f12ba5250" Oct 11 05:16:18 crc kubenswrapper[4651]: I1011 05:16:18.765893 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65afd761a4f73526d149ccc82f1180abe64846dcfce68d786cb4a93f12ba5250"} err="failed to get container status \"65afd761a4f73526d149ccc82f1180abe64846dcfce68d786cb4a93f12ba5250\": rpc error: code = NotFound desc = could not find container \"65afd761a4f73526d149ccc82f1180abe64846dcfce68d786cb4a93f12ba5250\": container with ID starting with 65afd761a4f73526d149ccc82f1180abe64846dcfce68d786cb4a93f12ba5250 not found: ID does not exist" Oct 11 05:16:18 crc kubenswrapper[4651]: I1011 05:16:18.765949 4651 scope.go:117] "RemoveContainer" containerID="7317cc34ff2e9c1aa81b8f666f2f5dfec284cf07fd51d8b31250597b5807d7ea" Oct 11 05:16:18 crc kubenswrapper[4651]: E1011 05:16:18.766302 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7317cc34ff2e9c1aa81b8f666f2f5dfec284cf07fd51d8b31250597b5807d7ea\": container with ID starting with 7317cc34ff2e9c1aa81b8f666f2f5dfec284cf07fd51d8b31250597b5807d7ea not found: ID does not exist" containerID="7317cc34ff2e9c1aa81b8f666f2f5dfec284cf07fd51d8b31250597b5807d7ea" Oct 11 05:16:18 crc kubenswrapper[4651]: I1011 05:16:18.766330 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7317cc34ff2e9c1aa81b8f666f2f5dfec284cf07fd51d8b31250597b5807d7ea"} err="failed to get container status \"7317cc34ff2e9c1aa81b8f666f2f5dfec284cf07fd51d8b31250597b5807d7ea\": rpc error: code = NotFound desc = could not find container \"7317cc34ff2e9c1aa81b8f666f2f5dfec284cf07fd51d8b31250597b5807d7ea\": container with ID starting with 7317cc34ff2e9c1aa81b8f666f2f5dfec284cf07fd51d8b31250597b5807d7ea not found: ID does not exist" Oct 11 05:16:19 crc kubenswrapper[4651]: I1011 05:16:19.894940 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="670e1409-41ba-449a-a5ad-d7a345277a30" path="/var/lib/kubelet/pods/670e1409-41ba-449a-a5ad-d7a345277a30/volumes" Oct 11 05:16:24 crc kubenswrapper[4651]: I1011 05:16:24.567392 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5whrf"] Oct 11 05:16:24 crc kubenswrapper[4651]: E1011 05:16:24.568344 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="670e1409-41ba-449a-a5ad-d7a345277a30" containerName="extract-content" Oct 11 05:16:24 crc kubenswrapper[4651]: I1011 05:16:24.568362 4651 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="670e1409-41ba-449a-a5ad-d7a345277a30" containerName="extract-content" Oct 11 05:16:24 crc kubenswrapper[4651]: E1011 05:16:24.568393 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="670e1409-41ba-449a-a5ad-d7a345277a30" containerName="registry-server" Oct 11 05:16:24 crc kubenswrapper[4651]: I1011 05:16:24.568402 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="670e1409-41ba-449a-a5ad-d7a345277a30" containerName="registry-server" Oct 11 05:16:24 crc kubenswrapper[4651]: E1011 05:16:24.568441 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="670e1409-41ba-449a-a5ad-d7a345277a30" containerName="extract-utilities" Oct 11 05:16:24 crc kubenswrapper[4651]: I1011 05:16:24.568450 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="670e1409-41ba-449a-a5ad-d7a345277a30" containerName="extract-utilities" Oct 11 05:16:24 crc kubenswrapper[4651]: I1011 05:16:24.568686 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="670e1409-41ba-449a-a5ad-d7a345277a30" containerName="registry-server" Oct 11 05:16:24 crc kubenswrapper[4651]: I1011 05:16:24.570388 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5whrf" Oct 11 05:16:24 crc kubenswrapper[4651]: I1011 05:16:24.590213 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5whrf"] Oct 11 05:16:24 crc kubenswrapper[4651]: I1011 05:16:24.741850 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3626a1ea-0b6c-4be2-9942-7d3bce69a42d-catalog-content\") pod \"redhat-operators-5whrf\" (UID: \"3626a1ea-0b6c-4be2-9942-7d3bce69a42d\") " pod="openshift-marketplace/redhat-operators-5whrf" Oct 11 05:16:24 crc kubenswrapper[4651]: I1011 05:16:24.742101 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btt52\" (UniqueName: \"kubernetes.io/projected/3626a1ea-0b6c-4be2-9942-7d3bce69a42d-kube-api-access-btt52\") pod \"redhat-operators-5whrf\" (UID: \"3626a1ea-0b6c-4be2-9942-7d3bce69a42d\") " pod="openshift-marketplace/redhat-operators-5whrf" Oct 11 05:16:24 crc kubenswrapper[4651]: I1011 05:16:24.742245 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3626a1ea-0b6c-4be2-9942-7d3bce69a42d-utilities\") pod \"redhat-operators-5whrf\" (UID: \"3626a1ea-0b6c-4be2-9942-7d3bce69a42d\") " pod="openshift-marketplace/redhat-operators-5whrf" Oct 11 05:16:24 crc kubenswrapper[4651]: I1011 05:16:24.843428 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3626a1ea-0b6c-4be2-9942-7d3bce69a42d-utilities\") pod \"redhat-operators-5whrf\" (UID: \"3626a1ea-0b6c-4be2-9942-7d3bce69a42d\") " pod="openshift-marketplace/redhat-operators-5whrf" Oct 11 05:16:24 crc kubenswrapper[4651]: I1011 05:16:24.843501 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3626a1ea-0b6c-4be2-9942-7d3bce69a42d-catalog-content\") pod \"redhat-operators-5whrf\" (UID: \"3626a1ea-0b6c-4be2-9942-7d3bce69a42d\") " pod="openshift-marketplace/redhat-operators-5whrf" Oct 11 05:16:24 crc kubenswrapper[4651]: I1011 05:16:24.843533 4651 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-btt52\" (UniqueName: \"kubernetes.io/projected/3626a1ea-0b6c-4be2-9942-7d3bce69a42d-kube-api-access-btt52\") pod \"redhat-operators-5whrf\" (UID: \"3626a1ea-0b6c-4be2-9942-7d3bce69a42d\") " pod="openshift-marketplace/redhat-operators-5whrf" Oct 11 05:16:24 crc kubenswrapper[4651]: I1011 05:16:24.844433 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3626a1ea-0b6c-4be2-9942-7d3bce69a42d-catalog-content\") pod \"redhat-operators-5whrf\" (UID: \"3626a1ea-0b6c-4be2-9942-7d3bce69a42d\") " pod="openshift-marketplace/redhat-operators-5whrf" Oct 11 05:16:24 crc kubenswrapper[4651]: I1011 05:16:24.844449 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3626a1ea-0b6c-4be2-9942-7d3bce69a42d-utilities\") pod \"redhat-operators-5whrf\" (UID: \"3626a1ea-0b6c-4be2-9942-7d3bce69a42d\") " pod="openshift-marketplace/redhat-operators-5whrf" Oct 11 05:16:24 crc kubenswrapper[4651]: I1011 05:16:24.867209 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btt52\" (UniqueName: \"kubernetes.io/projected/3626a1ea-0b6c-4be2-9942-7d3bce69a42d-kube-api-access-btt52\") pod \"redhat-operators-5whrf\" (UID: \"3626a1ea-0b6c-4be2-9942-7d3bce69a42d\") " pod="openshift-marketplace/redhat-operators-5whrf" Oct 11 05:16:24 crc kubenswrapper[4651]: I1011 05:16:24.906776 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5whrf" Oct 11 05:16:25 crc kubenswrapper[4651]: I1011 05:16:25.374249 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5whrf"] Oct 11 05:16:25 crc kubenswrapper[4651]: W1011 05:16:25.379340 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3626a1ea_0b6c_4be2_9942_7d3bce69a42d.slice/crio-03462327ac40a9f048e788833b223c738976655a7cb08634b4c240264b1a4d90 WatchSource:0}: Error finding container 03462327ac40a9f048e788833b223c738976655a7cb08634b4c240264b1a4d90: Status 404 returned error can't find the container with id 03462327ac40a9f048e788833b223c738976655a7cb08634b4c240264b1a4d90 Oct 11 05:16:25 crc kubenswrapper[4651]: I1011 05:16:25.733968 4651 generic.go:334] "Generic (PLEG): container finished" podID="3626a1ea-0b6c-4be2-9942-7d3bce69a42d" containerID="2c0de5863fc60af9fe62166fb3a257e8783c6485c3e98d76f6d9086001a30aa9" exitCode=0 Oct 11 05:16:25 crc kubenswrapper[4651]: I1011 05:16:25.734026 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5whrf" event={"ID":"3626a1ea-0b6c-4be2-9942-7d3bce69a42d","Type":"ContainerDied","Data":"2c0de5863fc60af9fe62166fb3a257e8783c6485c3e98d76f6d9086001a30aa9"} Oct 11 05:16:25 crc kubenswrapper[4651]: I1011 05:16:25.734631 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5whrf" event={"ID":"3626a1ea-0b6c-4be2-9942-7d3bce69a42d","Type":"ContainerStarted","Data":"03462327ac40a9f048e788833b223c738976655a7cb08634b4c240264b1a4d90"} Oct 11 05:16:27 crc kubenswrapper[4651]: I1011 05:16:27.760701 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5whrf" 
event={"ID":"3626a1ea-0b6c-4be2-9942-7d3bce69a42d","Type":"ContainerStarted","Data":"bc77ee4498d84c0a735aed7ea6610792c55375d2bfd8392bae91af57b0e73af4"} Oct 11 05:16:28 crc kubenswrapper[4651]: I1011 05:16:28.780989 4651 generic.go:334] "Generic (PLEG): container finished" podID="3626a1ea-0b6c-4be2-9942-7d3bce69a42d" containerID="bc77ee4498d84c0a735aed7ea6610792c55375d2bfd8392bae91af57b0e73af4" exitCode=0 Oct 11 05:16:28 crc kubenswrapper[4651]: I1011 05:16:28.781064 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5whrf" event={"ID":"3626a1ea-0b6c-4be2-9942-7d3bce69a42d","Type":"ContainerDied","Data":"bc77ee4498d84c0a735aed7ea6610792c55375d2bfd8392bae91af57b0e73af4"} Oct 11 05:16:29 crc kubenswrapper[4651]: I1011 05:16:29.811017 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5whrf" event={"ID":"3626a1ea-0b6c-4be2-9942-7d3bce69a42d","Type":"ContainerStarted","Data":"7e819cbc61e8b71a624eb149d5350f3fed44eca67aea3130e36b23cd283f4a14"} Oct 11 05:16:29 crc kubenswrapper[4651]: I1011 05:16:29.845353 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5whrf" podStartSLOduration=2.300220816 podStartE2EDuration="5.845332735s" podCreationTimestamp="2025-10-11 05:16:24 +0000 UTC" firstStartedPulling="2025-10-11 05:16:25.735644624 +0000 UTC m=+1506.631877420" lastFinishedPulling="2025-10-11 05:16:29.280756533 +0000 UTC m=+1510.176989339" observedRunningTime="2025-10-11 05:16:29.833223273 +0000 UTC m=+1510.729456079" watchObservedRunningTime="2025-10-11 05:16:29.845332735 +0000 UTC m=+1510.741565531" Oct 11 05:16:32 crc kubenswrapper[4651]: I1011 05:16:32.699965 4651 scope.go:117] "RemoveContainer" containerID="77e5112d038240ad7b4f0dee7a7dbf0208e5696fc759677bdb99b1eef7e5d1fc" Oct 11 05:16:34 crc kubenswrapper[4651]: I1011 05:16:34.907083 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5whrf" Oct 11 05:16:34 crc kubenswrapper[4651]: I1011 05:16:34.907579 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5whrf" Oct 11 05:16:34 crc kubenswrapper[4651]: I1011 05:16:34.959190 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5whrf" Oct 11 05:16:35 crc kubenswrapper[4651]: I1011 05:16:35.947359 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5whrf" Oct 11 05:16:36 crc kubenswrapper[4651]: I1011 05:16:36.029700 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5whrf"] Oct 11 05:16:37 crc kubenswrapper[4651]: I1011 05:16:37.893322 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5whrf" podUID="3626a1ea-0b6c-4be2-9942-7d3bce69a42d" containerName="registry-server" containerID="cri-o://7e819cbc61e8b71a624eb149d5350f3fed44eca67aea3130e36b23cd283f4a14" gracePeriod=2 Oct 11 05:16:38 crc kubenswrapper[4651]: I1011 05:16:38.436429 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5whrf" Oct 11 05:16:38 crc kubenswrapper[4651]: I1011 05:16:38.521766 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-btt52\" (UniqueName: \"kubernetes.io/projected/3626a1ea-0b6c-4be2-9942-7d3bce69a42d-kube-api-access-btt52\") pod \"3626a1ea-0b6c-4be2-9942-7d3bce69a42d\" (UID: \"3626a1ea-0b6c-4be2-9942-7d3bce69a42d\") " Oct 11 05:16:38 crc kubenswrapper[4651]: I1011 05:16:38.521991 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3626a1ea-0b6c-4be2-9942-7d3bce69a42d-catalog-content\") pod \"3626a1ea-0b6c-4be2-9942-7d3bce69a42d\" (UID: \"3626a1ea-0b6c-4be2-9942-7d3bce69a42d\") " Oct 11 05:16:38 crc kubenswrapper[4651]: I1011 05:16:38.522036 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3626a1ea-0b6c-4be2-9942-7d3bce69a42d-utilities\") pod \"3626a1ea-0b6c-4be2-9942-7d3bce69a42d\" (UID: \"3626a1ea-0b6c-4be2-9942-7d3bce69a42d\") " Oct 11 05:16:38 crc kubenswrapper[4651]: I1011 05:16:38.522965 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3626a1ea-0b6c-4be2-9942-7d3bce69a42d-utilities" (OuterVolumeSpecName: "utilities") pod "3626a1ea-0b6c-4be2-9942-7d3bce69a42d" (UID: "3626a1ea-0b6c-4be2-9942-7d3bce69a42d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:16:38 crc kubenswrapper[4651]: I1011 05:16:38.531740 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3626a1ea-0b6c-4be2-9942-7d3bce69a42d-kube-api-access-btt52" (OuterVolumeSpecName: "kube-api-access-btt52") pod "3626a1ea-0b6c-4be2-9942-7d3bce69a42d" (UID: "3626a1ea-0b6c-4be2-9942-7d3bce69a42d"). InnerVolumeSpecName "kube-api-access-btt52". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:16:38 crc kubenswrapper[4651]: I1011 05:16:38.612143 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3626a1ea-0b6c-4be2-9942-7d3bce69a42d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3626a1ea-0b6c-4be2-9942-7d3bce69a42d" (UID: "3626a1ea-0b6c-4be2-9942-7d3bce69a42d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:16:38 crc kubenswrapper[4651]: I1011 05:16:38.625412 4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3626a1ea-0b6c-4be2-9942-7d3bce69a42d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 05:16:38 crc kubenswrapper[4651]: I1011 05:16:38.625479 4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3626a1ea-0b6c-4be2-9942-7d3bce69a42d-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 05:16:38 crc kubenswrapper[4651]: I1011 05:16:38.625509 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-btt52\" (UniqueName: \"kubernetes.io/projected/3626a1ea-0b6c-4be2-9942-7d3bce69a42d-kube-api-access-btt52\") on node \"crc\" DevicePath \"\"" Oct 11 05:16:38 crc kubenswrapper[4651]: I1011 05:16:38.912208 4651 generic.go:334] "Generic (PLEG): container finished" podID="3626a1ea-0b6c-4be2-9942-7d3bce69a42d" containerID="7e819cbc61e8b71a624eb149d5350f3fed44eca67aea3130e36b23cd283f4a14" exitCode=0 Oct 11 05:16:38 crc kubenswrapper[4651]: I1011 05:16:38.912255 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5whrf" event={"ID":"3626a1ea-0b6c-4be2-9942-7d3bce69a42d","Type":"ContainerDied","Data":"7e819cbc61e8b71a624eb149d5350f3fed44eca67aea3130e36b23cd283f4a14"} Oct 11 05:16:38 crc kubenswrapper[4651]: I1011 05:16:38.912286 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5whrf" event={"ID":"3626a1ea-0b6c-4be2-9942-7d3bce69a42d","Type":"ContainerDied","Data":"03462327ac40a9f048e788833b223c738976655a7cb08634b4c240264b1a4d90"} Oct 11 05:16:38 crc kubenswrapper[4651]: I1011 05:16:38.912305 4651 scope.go:117] "RemoveContainer" containerID="7e819cbc61e8b71a624eb149d5350f3fed44eca67aea3130e36b23cd283f4a14" Oct 11 05:16:38 crc kubenswrapper[4651]: I1011 05:16:38.912475 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5whrf"
Oct 11 05:16:38 crc kubenswrapper[4651]: I1011 05:16:38.953552 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5whrf"]
Oct 11 05:16:38 crc kubenswrapper[4651]: I1011 05:16:38.967435 4651 scope.go:117] "RemoveContainer" containerID="bc77ee4498d84c0a735aed7ea6610792c55375d2bfd8392bae91af57b0e73af4"
Oct 11 05:16:38 crc kubenswrapper[4651]: I1011 05:16:38.970424 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5whrf"]
Oct 11 05:16:39 crc kubenswrapper[4651]: I1011 05:16:39.002452 4651 scope.go:117] "RemoveContainer" containerID="2c0de5863fc60af9fe62166fb3a257e8783c6485c3e98d76f6d9086001a30aa9"
Oct 11 05:16:39 crc kubenswrapper[4651]: I1011 05:16:39.052142 4651 scope.go:117] "RemoveContainer" containerID="7e819cbc61e8b71a624eb149d5350f3fed44eca67aea3130e36b23cd283f4a14"
Oct 11 05:16:39 crc kubenswrapper[4651]: E1011 05:16:39.052690 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e819cbc61e8b71a624eb149d5350f3fed44eca67aea3130e36b23cd283f4a14\": container with ID starting with 7e819cbc61e8b71a624eb149d5350f3fed44eca67aea3130e36b23cd283f4a14 not found: ID does not exist" containerID="7e819cbc61e8b71a624eb149d5350f3fed44eca67aea3130e36b23cd283f4a14"
Oct 11 05:16:39 crc kubenswrapper[4651]: I1011 05:16:39.052763 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e819cbc61e8b71a624eb149d5350f3fed44eca67aea3130e36b23cd283f4a14"} err="failed to get container status \"7e819cbc61e8b71a624eb149d5350f3fed44eca67aea3130e36b23cd283f4a14\": rpc error: code = NotFound desc = could not find container \"7e819cbc61e8b71a624eb149d5350f3fed44eca67aea3130e36b23cd283f4a14\": container with ID starting with 7e819cbc61e8b71a624eb149d5350f3fed44eca67aea3130e36b23cd283f4a14 not found: ID does not exist"
Oct 11 05:16:39 crc kubenswrapper[4651]: I1011 05:16:39.052805 4651 scope.go:117] "RemoveContainer" containerID="bc77ee4498d84c0a735aed7ea6610792c55375d2bfd8392bae91af57b0e73af4"
Oct 11 05:16:39 crc kubenswrapper[4651]: E1011 05:16:39.053480 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc77ee4498d84c0a735aed7ea6610792c55375d2bfd8392bae91af57b0e73af4\": container with ID starting with bc77ee4498d84c0a735aed7ea6610792c55375d2bfd8392bae91af57b0e73af4 not found: ID does not exist" containerID="bc77ee4498d84c0a735aed7ea6610792c55375d2bfd8392bae91af57b0e73af4"
Oct 11 05:16:39 crc kubenswrapper[4651]: I1011 05:16:39.053523 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc77ee4498d84c0a735aed7ea6610792c55375d2bfd8392bae91af57b0e73af4"} err="failed to get container status \"bc77ee4498d84c0a735aed7ea6610792c55375d2bfd8392bae91af57b0e73af4\": rpc error: code = NotFound desc = could not find container \"bc77ee4498d84c0a735aed7ea6610792c55375d2bfd8392bae91af57b0e73af4\": container with ID starting with bc77ee4498d84c0a735aed7ea6610792c55375d2bfd8392bae91af57b0e73af4 not found: ID does not exist"
Oct 11 05:16:39 crc kubenswrapper[4651]: I1011 05:16:39.053588 4651 scope.go:117] "RemoveContainer" containerID="2c0de5863fc60af9fe62166fb3a257e8783c6485c3e98d76f6d9086001a30aa9"
Oct 11 05:16:39 crc kubenswrapper[4651]: E1011 05:16:39.054158 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c0de5863fc60af9fe62166fb3a257e8783c6485c3e98d76f6d9086001a30aa9\": container with ID starting with 2c0de5863fc60af9fe62166fb3a257e8783c6485c3e98d76f6d9086001a30aa9 not found: ID does not exist" containerID="2c0de5863fc60af9fe62166fb3a257e8783c6485c3e98d76f6d9086001a30aa9"
Oct 11 05:16:39 crc kubenswrapper[4651]: I1011 05:16:39.054182 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c0de5863fc60af9fe62166fb3a257e8783c6485c3e98d76f6d9086001a30aa9"} err="failed to get container status \"2c0de5863fc60af9fe62166fb3a257e8783c6485c3e98d76f6d9086001a30aa9\": rpc error: code = NotFound desc = could not find container \"2c0de5863fc60af9fe62166fb3a257e8783c6485c3e98d76f6d9086001a30aa9\": container with ID starting with 2c0de5863fc60af9fe62166fb3a257e8783c6485c3e98d76f6d9086001a30aa9 not found: ID does not exist"
Oct 11 05:16:39 crc kubenswrapper[4651]: I1011 05:16:39.888874 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3626a1ea-0b6c-4be2-9942-7d3bce69a42d" path="/var/lib/kubelet/pods/3626a1ea-0b6c-4be2-9942-7d3bce69a42d/volumes"
Oct 11 05:16:46 crc kubenswrapper[4651]: I1011 05:16:46.309938 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 11 05:16:46 crc kubenswrapper[4651]: I1011 05:16:46.310354 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 11 05:16:47 crc kubenswrapper[4651]: I1011 05:16:47.069473 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-lff65"]
Oct 11 05:16:47 crc kubenswrapper[4651]: I1011 05:16:47.084207 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-lff65"]
Oct 11 05:16:47 crc kubenswrapper[4651]: I1011 05:16:47.899424 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f0c06fb-d21f-42e0-ad0d-a54ba7cea292" path="/var/lib/kubelet/pods/7f0c06fb-d21f-42e0-ad0d-a54ba7cea292/volumes"
Oct 11 05:16:48 crc kubenswrapper[4651]: I1011 05:16:48.030424 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-5gv5n"]
Oct 11 05:16:48 crc kubenswrapper[4651]: I1011 05:16:48.041379 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-5gv5n"]
Oct 11 05:16:49 crc kubenswrapper[4651]: I1011 05:16:49.908214 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b502fe10-f784-49d8-8b56-b3384c15f4f7" path="/var/lib/kubelet/pods/b502fe10-f784-49d8-8b56-b3384c15f4f7/volumes"
Oct 11 05:16:57 crc kubenswrapper[4651]: I1011 05:16:57.027367 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-8zck6"]
Oct 11 05:16:57 crc kubenswrapper[4651]: I1011 05:16:57.041560 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-8zck6"]
Oct 11 05:16:57 crc kubenswrapper[4651]: I1011 05:16:57.890186 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7890e4dd-435a-4c55-85a0-71bd4ccac0a6" path="/var/lib/kubelet/pods/7890e4dd-435a-4c55-85a0-71bd4ccac0a6/volumes"
Oct 11 05:17:00 crc kubenswrapper[4651]: I1011 05:17:00.023175 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-c468-account-create-zcb7t"]
Oct 11 05:17:00 crc kubenswrapper[4651]: I1011 05:17:00.031995 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-c468-account-create-zcb7t"]
Oct 11 05:17:00 crc kubenswrapper[4651]: I1011 05:17:00.042976 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-a6cc-account-create-flc4v"]
Oct 11 05:17:00 crc kubenswrapper[4651]: I1011 05:17:00.050244 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-a6cc-account-create-flc4v"]
Oct 11 05:17:01 crc kubenswrapper[4651]: I1011 05:17:01.882670 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1898b88c-a558-4880-9502-11e9175555ea" path="/var/lib/kubelet/pods/1898b88c-a558-4880-9502-11e9175555ea/volumes"
Oct 11 05:17:01 crc kubenswrapper[4651]: I1011 05:17:01.883272 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6bfca2f-54e4-46e5-9c51-29f08bc730d1" path="/var/lib/kubelet/pods/b6bfca2f-54e4-46e5-9c51-29f08bc730d1/volumes"
Oct 11 05:17:03 crc kubenswrapper[4651]: I1011 05:17:03.044635 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-1187-account-create-pmkbh"]
Oct 11 05:17:03 crc kubenswrapper[4651]: I1011 05:17:03.057552 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-1187-account-create-pmkbh"]
Oct 11 05:17:03 crc kubenswrapper[4651]: I1011 05:17:03.889608 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa772ad4-2dcf-4af4-9cea-a0c7cb57557d" path="/var/lib/kubelet/pods/fa772ad4-2dcf-4af4-9cea-a0c7cb57557d/volumes"
Oct 11 05:17:16 crc kubenswrapper[4651]: I1011 05:17:16.310779 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 11 05:17:16 crc kubenswrapper[4651]: I1011 05:17:16.311163 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 11 05:17:16 crc kubenswrapper[4651]: I1011 05:17:16.311205 4651 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-78jnv"
Oct 11 05:17:16 crc kubenswrapper[4651]: I1011 05:17:16.311849 4651 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9e22517d26caf83c6f2aa3910c4d04d694ebd6ca37f48f88ab2ba796ba35e6e7"} pod="openshift-machine-config-operator/machine-config-daemon-78jnv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 11 05:17:16 crc kubenswrapper[4651]: I1011 05:17:16.311901 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" containerID="cri-o://9e22517d26caf83c6f2aa3910c4d04d694ebd6ca37f48f88ab2ba796ba35e6e7" gracePeriod=600
Oct 11 05:17:16 crc kubenswrapper[4651]: E1011 05:17:16.438243 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1"
Oct 11 05:17:17 crc kubenswrapper[4651]: I1011 05:17:17.301923 4651 generic.go:334] "Generic (PLEG): container finished" podID="519a1ae1-e964-48b0-8b61-835146df28c1" containerID="9e22517d26caf83c6f2aa3910c4d04d694ebd6ca37f48f88ab2ba796ba35e6e7" exitCode=0
Oct 11 05:17:17 crc kubenswrapper[4651]: I1011 05:17:17.301972 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" event={"ID":"519a1ae1-e964-48b0-8b61-835146df28c1","Type":"ContainerDied","Data":"9e22517d26caf83c6f2aa3910c4d04d694ebd6ca37f48f88ab2ba796ba35e6e7"}
Oct 11 05:17:17 crc kubenswrapper[4651]: I1011 05:17:17.302008 4651 scope.go:117] "RemoveContainer" containerID="fdfc9e3f19e3d1ca9aaf28ab30da5e0edfd8b9e029feb471e05d551727bd0ad1"
Oct 11 05:17:17 crc kubenswrapper[4651]: I1011 05:17:17.303684 4651 scope.go:117] "RemoveContainer" containerID="9e22517d26caf83c6f2aa3910c4d04d694ebd6ca37f48f88ab2ba796ba35e6e7"
Oct 11 05:17:17 crc kubenswrapper[4651]: E1011 05:17:17.304365 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1"
Oct 11 05:17:25 crc kubenswrapper[4651]: I1011 05:17:25.051211 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-n4zlv"]
Oct 11 05:17:25 crc kubenswrapper[4651]: I1011 05:17:25.060870 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-bbb7v"]
Oct 11 05:17:25 crc kubenswrapper[4651]: I1011 05:17:25.069495 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-mkkvr"]
Oct 11 05:17:25 crc kubenswrapper[4651]: I1011 05:17:25.076513 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-n4zlv"]
Oct 11 05:17:25 crc kubenswrapper[4651]: I1011 05:17:25.082683 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-mkkvr"]
Oct 11 05:17:25 crc kubenswrapper[4651]: I1011 05:17:25.089730 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-bbb7v"]
Oct 11 05:17:25 crc kubenswrapper[4651]: I1011 05:17:25.882762 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a968fd78-9f70-4494-8d24-604cf4a4d8a1" path="/var/lib/kubelet/pods/a968fd78-9f70-4494-8d24-604cf4a4d8a1/volumes"
Oct 11 05:17:25 crc kubenswrapper[4651]: I1011 05:17:25.883495 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8996cb1-d92c-4ec2-96b3-0aa6f643c3da" path="/var/lib/kubelet/pods/d8996cb1-d92c-4ec2-96b3-0aa6f643c3da/volumes"
Oct 11 05:17:25 crc kubenswrapper[4651]: I1011 05:17:25.884320 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e88f1ae5-9219-4c23-b6e2-a115005a1011" path="/var/lib/kubelet/pods/e88f1ae5-9219-4c23-b6e2-a115005a1011/volumes"
Oct 11 05:17:27 crc kubenswrapper[4651]: I1011 05:17:27.871193 4651 scope.go:117] "RemoveContainer" containerID="9e22517d26caf83c6f2aa3910c4d04d694ebd6ca37f48f88ab2ba796ba35e6e7"
Oct 11 05:17:27 crc kubenswrapper[4651]: E1011 05:17:27.872097 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1"
Oct 11 05:17:32 crc kubenswrapper[4651]: I1011 05:17:32.043407 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-j5wrj"]
Oct 11 05:17:32 crc kubenswrapper[4651]: I1011 05:17:32.062880 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-j5wrj"]
Oct 11 05:17:32 crc kubenswrapper[4651]: I1011 05:17:32.793020 4651 scope.go:117] "RemoveContainer" containerID="811c4dd8df694f8ba0319b1f3d9bd303bb9c090463c57a39f44149b08756bb28"
Oct 11 05:17:32 crc kubenswrapper[4651]: I1011 05:17:32.837465 4651 scope.go:117] "RemoveContainer" containerID="ca5b2fe537d7de541b37559141ad1fef14b5b5ca8227cbc0512f86b8dc0faaf0"
Oct 11 05:17:32 crc kubenswrapper[4651]: I1011 05:17:32.905716 4651 scope.go:117] "RemoveContainer" containerID="1c0792869a03f977cada24728a5defddd81033af56785de4c6b4e4194e821447"
Oct 11 05:17:32 crc kubenswrapper[4651]: I1011 05:17:32.972919 4651 scope.go:117] "RemoveContainer" containerID="b532309e0ff6a4f98e0e731ccf5914ba1cbaf5a40f9ab1e9b374e0c9f709f125"
Oct 11 05:17:33 crc kubenswrapper[4651]: I1011 05:17:33.010279 4651 scope.go:117] "RemoveContainer" containerID="55b85f7500414ae562749bb5078422e7bb4280994c7a6aa499d10dbf080111cb"
Oct 11 05:17:33 crc kubenswrapper[4651]: I1011 05:17:33.048138 4651 scope.go:117] "RemoveContainer" containerID="a4992c024d0d86a07ee71184074329012f128a9e5697a8fbf9b50d02ed033b57"
Oct 11 05:17:33 crc kubenswrapper[4651]: I1011 05:17:33.089585 4651 scope.go:117] "RemoveContainer" containerID="6431114d06a5d46b7d8f1f38cba284b6ee35a0e03e734502f422a31ba1a761ed"
Oct 11 05:17:33 crc kubenswrapper[4651]: I1011 05:17:33.119237 4651 scope.go:117] "RemoveContainer" containerID="f8b6b2f56e80c4bfd3a9370cef23e7b470d388e9facffec9baccb9273cca0d33"
Oct 11 05:17:33 crc kubenswrapper[4651]: I1011 05:17:33.141432 4651 scope.go:117] "RemoveContainer" containerID="8d2ac2e46cd43ac79eb33b8368a757f4b45376885b8476503c15e0c05de24c2c"
Oct 11 05:17:33 crc kubenswrapper[4651]: I1011 05:17:33.881401 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7e37fff-86fd-435f-b124-27f7c2afb74d" path="/var/lib/kubelet/pods/d7e37fff-86fd-435f-b124-27f7c2afb74d/volumes"
Oct 11 05:17:35 crc kubenswrapper[4651]: I1011 05:17:35.034215 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-5e94-account-create-qxkc6"]
Oct 11 05:17:35 crc kubenswrapper[4651]: I1011 05:17:35.044737 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-921e-account-create-p5bgn"]
Oct 11 05:17:35 crc kubenswrapper[4651]: I1011 05:17:35.056014 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-2878-account-create-bqv6m"]
Oct 11 05:17:35 crc kubenswrapper[4651]: I1011 05:17:35.065264 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-921e-account-create-p5bgn"]
Oct 11 05:17:35 crc kubenswrapper[4651]: I1011 05:17:35.074088 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-2878-account-create-bqv6m"]
Oct 11 05:17:35 crc kubenswrapper[4651]: I1011 05:17:35.081108 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-5e94-account-create-qxkc6"]
Oct 11 05:17:35 crc kubenswrapper[4651]: I1011 05:17:35.883518 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="286d8877-76aa-4258-a93d-da719dda8143" path="/var/lib/kubelet/pods/286d8877-76aa-4258-a93d-da719dda8143/volumes"
Oct 11 05:17:35 crc kubenswrapper[4651]: I1011 05:17:35.884409 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ad2c489-03ce-4435-8b12-4a7f77d12c95" path="/var/lib/kubelet/pods/3ad2c489-03ce-4435-8b12-4a7f77d12c95/volumes"
Oct 11 05:17:35 crc kubenswrapper[4651]: I1011 05:17:35.885090 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a197d94-8ca4-4992-9bb9-f6c6d42a1351" path="/var/lib/kubelet/pods/8a197d94-8ca4-4992-9bb9-f6c6d42a1351/volumes"
Oct 11 05:17:36 crc kubenswrapper[4651]: I1011 05:17:36.051319 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-ktc4s"]
Oct 11 05:17:36 crc kubenswrapper[4651]: I1011 05:17:36.065527 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-ktc4s"]
Oct 11 05:17:37 crc kubenswrapper[4651]: I1011 05:17:37.889226 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0c34b4f-c8ab-4940-9c75-82d4b4e6988c" path="/var/lib/kubelet/pods/f0c34b4f-c8ab-4940-9c75-82d4b4e6988c/volumes"
Oct 11 05:17:39 crc kubenswrapper[4651]: I1011 05:17:39.880925 4651 scope.go:117] "RemoveContainer" containerID="9e22517d26caf83c6f2aa3910c4d04d694ebd6ca37f48f88ab2ba796ba35e6e7"
Oct 11 05:17:39 crc kubenswrapper[4651]: E1011 05:17:39.881379 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1"
Oct 11 05:17:53 crc kubenswrapper[4651]: I1011 05:17:53.692342 4651 generic.go:334] "Generic (PLEG): container finished" podID="0428933e-bd0d-4be4-94a6-25caf11d1f23" containerID="6e82b0f94dda821748307873615fb9beff0fc0ee3ba6c7cbc0a57286fb950362" exitCode=0
Oct 11 05:17:53 crc kubenswrapper[4651]: I1011 05:17:53.692479 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xqtc2" event={"ID":"0428933e-bd0d-4be4-94a6-25caf11d1f23","Type":"ContainerDied","Data":"6e82b0f94dda821748307873615fb9beff0fc0ee3ba6c7cbc0a57286fb950362"}
Oct 11 05:17:54 crc kubenswrapper[4651]: I1011 05:17:54.872973 4651 scope.go:117] "RemoveContainer" containerID="9e22517d26caf83c6f2aa3910c4d04d694ebd6ca37f48f88ab2ba796ba35e6e7"
Oct 11 05:17:54 crc kubenswrapper[4651]: E1011 05:17:54.873514 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1"
Oct 11 05:17:55 crc kubenswrapper[4651]: I1011 05:17:55.194161 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xqtc2"
Oct 11 05:17:55 crc kubenswrapper[4651]: I1011 05:17:55.315889 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p87hp\" (UniqueName: \"kubernetes.io/projected/0428933e-bd0d-4be4-94a6-25caf11d1f23-kube-api-access-p87hp\") pod \"0428933e-bd0d-4be4-94a6-25caf11d1f23\" (UID: \"0428933e-bd0d-4be4-94a6-25caf11d1f23\") "
Oct 11 05:17:55 crc kubenswrapper[4651]: I1011 05:17:55.316321 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0428933e-bd0d-4be4-94a6-25caf11d1f23-ssh-key\") pod \"0428933e-bd0d-4be4-94a6-25caf11d1f23\" (UID: \"0428933e-bd0d-4be4-94a6-25caf11d1f23\") "
Oct 11 05:17:55 crc kubenswrapper[4651]: I1011 05:17:55.316472 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0428933e-bd0d-4be4-94a6-25caf11d1f23-inventory\") pod \"0428933e-bd0d-4be4-94a6-25caf11d1f23\" (UID: \"0428933e-bd0d-4be4-94a6-25caf11d1f23\") "
Oct 11 05:17:55 crc kubenswrapper[4651]: I1011 05:17:55.324054 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0428933e-bd0d-4be4-94a6-25caf11d1f23-kube-api-access-p87hp" (OuterVolumeSpecName: "kube-api-access-p87hp") pod "0428933e-bd0d-4be4-94a6-25caf11d1f23" (UID: "0428933e-bd0d-4be4-94a6-25caf11d1f23"). InnerVolumeSpecName "kube-api-access-p87hp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:17:55 crc kubenswrapper[4651]: I1011 05:17:55.343580 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0428933e-bd0d-4be4-94a6-25caf11d1f23-inventory" (OuterVolumeSpecName: "inventory") pod "0428933e-bd0d-4be4-94a6-25caf11d1f23" (UID: "0428933e-bd0d-4be4-94a6-25caf11d1f23"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:17:55 crc kubenswrapper[4651]: I1011 05:17:55.380080 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0428933e-bd0d-4be4-94a6-25caf11d1f23-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0428933e-bd0d-4be4-94a6-25caf11d1f23" (UID: "0428933e-bd0d-4be4-94a6-25caf11d1f23"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:17:55 crc kubenswrapper[4651]: I1011 05:17:55.419096 4651 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0428933e-bd0d-4be4-94a6-25caf11d1f23-inventory\") on node \"crc\" DevicePath \"\""
Oct 11 05:17:55 crc kubenswrapper[4651]: I1011 05:17:55.419153 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p87hp\" (UniqueName: \"kubernetes.io/projected/0428933e-bd0d-4be4-94a6-25caf11d1f23-kube-api-access-p87hp\") on node \"crc\" DevicePath \"\""
Oct 11 05:17:55 crc kubenswrapper[4651]: I1011 05:17:55.419174 4651 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0428933e-bd0d-4be4-94a6-25caf11d1f23-ssh-key\") on node \"crc\" DevicePath \"\""
Oct 11 05:17:55 crc kubenswrapper[4651]: I1011 05:17:55.756542 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xqtc2" event={"ID":"0428933e-bd0d-4be4-94a6-25caf11d1f23","Type":"ContainerDied","Data":"fba12f3a4bf92c2af7924934509bde51d39b28720844cd2a7a442529202c2b6b"}
Oct 11 05:17:55 crc kubenswrapper[4651]: I1011 05:17:55.756584 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fba12f3a4bf92c2af7924934509bde51d39b28720844cd2a7a442529202c2b6b"
Oct 11 05:17:55 crc kubenswrapper[4651]: I1011 05:17:55.756631 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xqtc2"
Oct 11 05:17:55 crc kubenswrapper[4651]: I1011 05:17:55.805990 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jq2ff"]
Oct 11 05:17:55 crc kubenswrapper[4651]: E1011 05:17:55.806362 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0428933e-bd0d-4be4-94a6-25caf11d1f23" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Oct 11 05:17:55 crc kubenswrapper[4651]: I1011 05:17:55.806377 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="0428933e-bd0d-4be4-94a6-25caf11d1f23" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Oct 11 05:17:55 crc kubenswrapper[4651]: E1011 05:17:55.806388 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3626a1ea-0b6c-4be2-9942-7d3bce69a42d" containerName="registry-server"
Oct 11 05:17:55 crc kubenswrapper[4651]: I1011 05:17:55.806394 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="3626a1ea-0b6c-4be2-9942-7d3bce69a42d" containerName="registry-server"
Oct 11 05:17:55 crc kubenswrapper[4651]: E1011 05:17:55.806416 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3626a1ea-0b6c-4be2-9942-7d3bce69a42d" containerName="extract-utilities"
Oct 11 05:17:55 crc kubenswrapper[4651]: I1011 05:17:55.806422 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="3626a1ea-0b6c-4be2-9942-7d3bce69a42d" containerName="extract-utilities"
Oct 11 05:17:55 crc kubenswrapper[4651]: E1011 05:17:55.806438 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3626a1ea-0b6c-4be2-9942-7d3bce69a42d" containerName="extract-content"
Oct 11 05:17:55 crc kubenswrapper[4651]: I1011 05:17:55.806444 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="3626a1ea-0b6c-4be2-9942-7d3bce69a42d" containerName="extract-content"
Oct 11 05:17:55 crc kubenswrapper[4651]: I1011 05:17:55.806608 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="3626a1ea-0b6c-4be2-9942-7d3bce69a42d" containerName="registry-server"
Oct 11 05:17:55 crc kubenswrapper[4651]: I1011 05:17:55.806626 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="0428933e-bd0d-4be4-94a6-25caf11d1f23" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Oct 11 05:17:55 crc kubenswrapper[4651]: I1011 05:17:55.807446 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jq2ff"
Oct 11 05:17:55 crc kubenswrapper[4651]: I1011 05:17:55.810680 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Oct 11 05:17:55 crc kubenswrapper[4651]: I1011 05:17:55.810798 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 11 05:17:55 crc kubenswrapper[4651]: I1011 05:17:55.811055 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Oct 11 05:17:55 crc kubenswrapper[4651]: I1011 05:17:55.811891 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r489p"
Oct 11 05:17:55 crc kubenswrapper[4651]: I1011 05:17:55.824604 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jq2ff"]
Oct 11 05:17:55 crc kubenswrapper[4651]: I1011 05:17:55.929444 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crnkf\" (UniqueName: \"kubernetes.io/projected/6634acd3-8550-4286-ad94-004cfe4c7def-kube-api-access-crnkf\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jq2ff\" (UID: \"6634acd3-8550-4286-ad94-004cfe4c7def\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jq2ff"
Oct 11 05:17:55 crc kubenswrapper[4651]: I1011 05:17:55.929524 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6634acd3-8550-4286-ad94-004cfe4c7def-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jq2ff\" (UID: \"6634acd3-8550-4286-ad94-004cfe4c7def\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jq2ff"
Oct 11 05:17:55 crc kubenswrapper[4651]: I1011 05:17:55.929636 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6634acd3-8550-4286-ad94-004cfe4c7def-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jq2ff\" (UID: \"6634acd3-8550-4286-ad94-004cfe4c7def\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jq2ff"
Oct 11 05:17:56 crc kubenswrapper[4651]: I1011 05:17:56.031972 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6634acd3-8550-4286-ad94-004cfe4c7def-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jq2ff\" (UID: \"6634acd3-8550-4286-ad94-004cfe4c7def\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jq2ff"
Oct 11 05:17:56 crc kubenswrapper[4651]: I1011 05:17:56.032233 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crnkf\" (UniqueName: \"kubernetes.io/projected/6634acd3-8550-4286-ad94-004cfe4c7def-kube-api-access-crnkf\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jq2ff\" (UID: \"6634acd3-8550-4286-ad94-004cfe4c7def\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jq2ff"
Oct 11 05:17:56 crc kubenswrapper[4651]: I1011 05:17:56.032334 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6634acd3-8550-4286-ad94-004cfe4c7def-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jq2ff\" (UID: \"6634acd3-8550-4286-ad94-004cfe4c7def\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jq2ff"
Oct 11 05:17:56 crc kubenswrapper[4651]: I1011 05:17:56.035942 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6634acd3-8550-4286-ad94-004cfe4c7def-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jq2ff\" (UID: \"6634acd3-8550-4286-ad94-004cfe4c7def\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jq2ff"
Oct 11 05:17:56 crc kubenswrapper[4651]: I1011 05:17:56.036381 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6634acd3-8550-4286-ad94-004cfe4c7def-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jq2ff\" (UID: \"6634acd3-8550-4286-ad94-004cfe4c7def\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jq2ff"
Oct 11 05:17:56 crc kubenswrapper[4651]: I1011 05:17:56.051249 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crnkf\" (UniqueName: \"kubernetes.io/projected/6634acd3-8550-4286-ad94-004cfe4c7def-kube-api-access-crnkf\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jq2ff\" (UID: \"6634acd3-8550-4286-ad94-004cfe4c7def\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jq2ff"
Oct 11 05:17:56 crc kubenswrapper[4651]: I1011 05:17:56.129783 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jq2ff"
Oct 11 05:17:56 crc kubenswrapper[4651]: I1011 05:17:56.751407 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jq2ff"]
Oct 11 05:17:56 crc kubenswrapper[4651]: I1011 05:17:56.756647 4651 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 11 05:17:56 crc kubenswrapper[4651]: I1011 05:17:56.765306 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jq2ff" event={"ID":"6634acd3-8550-4286-ad94-004cfe4c7def","Type":"ContainerStarted","Data":"daafa7af152b68a2b09a0defd7b06e1ae6b4c30183df9cd9cabf494fe1100425"}
Oct 11 05:17:57 crc kubenswrapper[4651]: I1011 05:17:57.777628 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jq2ff" event={"ID":"6634acd3-8550-4286-ad94-004cfe4c7def","Type":"ContainerStarted","Data":"081a708d5ef584b9fb7fcfaf2484d6de9378ea39b1b95800d05678c3d2eb7885"}
Oct 11 05:17:57 crc kubenswrapper[4651]: I1011 05:17:57.794495 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jq2ff" podStartSLOduration=2.273364671 podStartE2EDuration="2.794437409s" podCreationTimestamp="2025-10-11 05:17:55 +0000 UTC" firstStartedPulling="2025-10-11 05:17:56.75626761 +0000 UTC m=+1597.652500426" lastFinishedPulling="2025-10-11 05:17:57.277340328 +0000 UTC m=+1598.173573164" observedRunningTime="2025-10-11 05:17:57.791514002 +0000 UTC m=+1598.687746858" watchObservedRunningTime="2025-10-11 05:17:57.794437409 +0000 UTC m=+1598.690670245"
Oct 11 05:18:07 crc kubenswrapper[4651]: I1011 05:18:07.045459 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-zvfkb"]
Oct 11 05:18:07 crc kubenswrapper[4651]: I1011 05:18:07.055150 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-zvfkb"]
Oct 11 05:18:07 crc kubenswrapper[4651]: I1011 05:18:07.870686 4651 scope.go:117] "RemoveContainer" containerID="9e22517d26caf83c6f2aa3910c4d04d694ebd6ca37f48f88ab2ba796ba35e6e7"
Oct 11 05:18:07 crc kubenswrapper[4651]: E1011 05:18:07.871227 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1"
Oct 11 05:18:07 crc kubenswrapper[4651]: I1011 05:18:07.892247 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b35ecebc-5355-4ad7-bf37-0d288eed4fdc" path="/var/lib/kubelet/pods/b35ecebc-5355-4ad7-bf37-0d288eed4fdc/volumes"
Oct 11 05:18:18 crc kubenswrapper[4651]: I1011 05:18:18.870109 4651 scope.go:117] "RemoveContainer" containerID="9e22517d26caf83c6f2aa3910c4d04d694ebd6ca37f48f88ab2ba796ba35e6e7"
Oct 11 05:18:18 crc kubenswrapper[4651]: E1011 05:18:18.871482 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1"
Oct 11 05:18:19 crc kubenswrapper[4651]: I1011 05:18:19.040138 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-9rf4f"]
Oct 11 05:18:19 crc kubenswrapper[4651]: I1011 05:18:19.050587 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-9rf4f"]
Oct 11 05:18:19 crc kubenswrapper[4651]: I1011 05:18:19.885673 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="261ffa67-4305-4260-903d-93b8af576721" path="/var/lib/kubelet/pods/261ffa67-4305-4260-903d-93b8af576721/volumes"
Oct 11 05:18:27 crc kubenswrapper[4651]: I1011 05:18:27.037181 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-ctfxs"]
Oct 11 05:18:27 crc kubenswrapper[4651]: I1011 05:18:27.053938 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-chq8s"]
Oct 11 05:18:27 crc kubenswrapper[4651]: I1011 05:18:27.064844 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-ctfxs"]
Oct 11 05:18:27 crc kubenswrapper[4651]: I1011 05:18:27.074326 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-chq8s"]
Oct 11 05:18:27 crc kubenswrapper[4651]: I1011 05:18:27.892440 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f425c3a-376b-4ba1-8066-96b2d1f21698" path="/var/lib/kubelet/pods/4f425c3a-376b-4ba1-8066-96b2d1f21698/volumes"
Oct 11 05:18:27 crc kubenswrapper[4651]: I1011 05:18:27.894219 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96c528a9-d9c6-4eec-b63f-5bba189744ae" path="/var/lib/kubelet/pods/96c528a9-d9c6-4eec-b63f-5bba189744ae/volumes"
Oct 11 05:18:33 crc kubenswrapper[4651]: I1011 05:18:33.347977 4651 scope.go:117] "RemoveContainer" containerID="5063e393f084e925c50d94f5fe68ea874eea10559384d86483c21265911bee04"
Oct 11 05:18:33 crc kubenswrapper[4651]: I1011 05:18:33.411010 4651 scope.go:117] "RemoveContainer" containerID="ec38fe3c55d9354a2bef7aab13271702e6dca918bb369a2bd1f492631bcf64e3"
Oct 11 05:18:33 crc kubenswrapper[4651]: I1011 05:18:33.453599 4651 scope.go:117] "RemoveContainer" containerID="7e749e6c5e39c8b229cde18437b792d4641bdca4e4c93ef3ba37af438e266041"
Oct 11 05:18:33 crc kubenswrapper[4651]: I1011 05:18:33.540651 4651 scope.go:117] "RemoveContainer" containerID="9ce0c8b78d59e4cc58fe7fcc774cd7271869fa41a4e09f7e1fa4d0c2a13fd3fd"
Oct 11 05:18:33 crc kubenswrapper[4651]: I1011 05:18:33.570785 4651 scope.go:117] "RemoveContainer" containerID="5fdd10400e07e88a06a5fdb658a6364edb433042dc71c45303e4ee46efa5fd26"
Oct 11 05:18:33 crc kubenswrapper[4651]: I1011 05:18:33.632318 4651 scope.go:117] "RemoveContainer" containerID="dc39a8e7d062d4ad68ccdd4434f8ebd500d40b930224b8075357dab447b1bd22"
Oct 11 05:18:33 crc kubenswrapper[4651]: I1011 05:18:33.688810 4651 scope.go:117] "RemoveContainer" containerID="032cd564ff4bc5da969f0967080218c4a351342a2e6fab664f11896488b435fa"
Oct 11 05:18:33 crc kubenswrapper[4651]: I1011 05:18:33.723244 4651 scope.go:117] "RemoveContainer" containerID="45d0d53f261a983c6d235cb866663f1dd095110892989e5cfa4d5f08e9ebd055"
Oct 11 05:18:33 crc kubenswrapper[4651]: I1011 05:18:33.745905 4651 scope.go:117] "RemoveContainer" containerID="9b3929e4a24444e7ad80fa4b08644d48ced2c503be1cd6e36a2ae7c2b6ad7b47"
Oct 11 05:18:33 crc kubenswrapper[4651]: I1011 05:18:33.869973 4651 scope.go:117] "RemoveContainer" containerID="9e22517d26caf83c6f2aa3910c4d04d694ebd6ca37f48f88ab2ba796ba35e6e7"
Oct 11 05:18:33 crc kubenswrapper[4651]: E1011 05:18:33.870707 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1"
Oct 11 05:18:36 crc kubenswrapper[4651]: I1011 05:18:36.037633 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-8rxgw"]
Oct 11 05:18:36 crc kubenswrapper[4651]: I1011 05:18:36.046908 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-8rxgw"]
Oct 11 05:18:37 crc kubenswrapper[4651]: I1011 05:18:37.883768 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36b45d75-4e52-49b7-b7d7-13d53d2f7076" path="/var/lib/kubelet/pods/36b45d75-4e52-49b7-b7d7-13d53d2f7076/volumes"
Oct 11 05:18:48 crc kubenswrapper[4651]: I1011 05:18:48.869604 4651 scope.go:117] "RemoveContainer" containerID="9e22517d26caf83c6f2aa3910c4d04d694ebd6ca37f48f88ab2ba796ba35e6e7"
Oct 11 05:18:48 crc kubenswrapper[4651]: E1011 05:18:48.870652 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1"
Oct 11 05:18:59 crc kubenswrapper[4651]: I1011 05:18:59.882200 4651 scope.go:117] "RemoveContainer" containerID="9e22517d26caf83c6f2aa3910c4d04d694ebd6ca37f48f88ab2ba796ba35e6e7"
Oct 11 05:18:59 crc kubenswrapper[4651]: E1011 05:18:59.883582 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1"
Oct 11 05:19:12 crc kubenswrapper[4651]: I1011 05:19:12.870462 4651 scope.go:117] "RemoveContainer" containerID="9e22517d26caf83c6f2aa3910c4d04d694ebd6ca37f48f88ab2ba796ba35e6e7"
Oct 11 05:19:12 crc kubenswrapper[4651]: E1011 05:19:12.873556 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1"
Oct 11 05:19:17 crc kubenswrapper[4651]: I1011 05:19:17.670127 4651 generic.go:334] "Generic (PLEG): container finished" podID="6634acd3-8550-4286-ad94-004cfe4c7def" containerID="081a708d5ef584b9fb7fcfaf2484d6de9378ea39b1b95800d05678c3d2eb7885" exitCode=0
Oct 11 05:19:17 crc kubenswrapper[4651]: I1011 05:19:17.670226 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jq2ff" event={"ID":"6634acd3-8550-4286-ad94-004cfe4c7def","Type":"ContainerDied","Data":"081a708d5ef584b9fb7fcfaf2484d6de9378ea39b1b95800d05678c3d2eb7885"}
Oct 11 05:19:19 crc kubenswrapper[4651]: I1011 05:19:19.273549 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jq2ff"
Oct 11 05:19:19 crc kubenswrapper[4651]: I1011 05:19:19.452185 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6634acd3-8550-4286-ad94-004cfe4c7def-inventory\") pod \"6634acd3-8550-4286-ad94-004cfe4c7def\" (UID: \"6634acd3-8550-4286-ad94-004cfe4c7def\") "
Oct 11 05:19:19 crc kubenswrapper[4651]: I1011 05:19:19.452356 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-crnkf\" (UniqueName: \"kubernetes.io/projected/6634acd3-8550-4286-ad94-004cfe4c7def-kube-api-access-crnkf\") pod \"6634acd3-8550-4286-ad94-004cfe4c7def\" (UID: \"6634acd3-8550-4286-ad94-004cfe4c7def\") "
Oct 11 05:19:19 crc kubenswrapper[4651]: I1011 05:19:19.452548 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6634acd3-8550-4286-ad94-004cfe4c7def-ssh-key\") pod \"6634acd3-8550-4286-ad94-004cfe4c7def\" (UID: \"6634acd3-8550-4286-ad94-004cfe4c7def\") "
Oct 11 05:19:19 crc kubenswrapper[4651]: I1011 05:19:19.458470 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6634acd3-8550-4286-ad94-004cfe4c7def-kube-api-access-crnkf" (OuterVolumeSpecName: "kube-api-access-crnkf") pod "6634acd3-8550-4286-ad94-004cfe4c7def" (UID: "6634acd3-8550-4286-ad94-004cfe4c7def"). InnerVolumeSpecName "kube-api-access-crnkf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:19:19 crc kubenswrapper[4651]: I1011 05:19:19.481640 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6634acd3-8550-4286-ad94-004cfe4c7def-inventory" (OuterVolumeSpecName: "inventory") pod "6634acd3-8550-4286-ad94-004cfe4c7def" (UID: "6634acd3-8550-4286-ad94-004cfe4c7def"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:19:19 crc kubenswrapper[4651]: I1011 05:19:19.502364 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6634acd3-8550-4286-ad94-004cfe4c7def-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6634acd3-8550-4286-ad94-004cfe4c7def" (UID: "6634acd3-8550-4286-ad94-004cfe4c7def"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:19:19 crc kubenswrapper[4651]: I1011 05:19:19.555402 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-crnkf\" (UniqueName: \"kubernetes.io/projected/6634acd3-8550-4286-ad94-004cfe4c7def-kube-api-access-crnkf\") on node \"crc\" DevicePath \"\""
Oct 11 05:19:19 crc kubenswrapper[4651]: I1011 05:19:19.555472 4651 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6634acd3-8550-4286-ad94-004cfe4c7def-ssh-key\") on node \"crc\" DevicePath \"\""
Oct 11 05:19:19 crc kubenswrapper[4651]: I1011 05:19:19.555485 4651 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6634acd3-8550-4286-ad94-004cfe4c7def-inventory\") on node \"crc\" DevicePath \"\""
Oct 11 05:19:19 crc kubenswrapper[4651]: I1011 05:19:19.694648 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jq2ff" event={"ID":"6634acd3-8550-4286-ad94-004cfe4c7def","Type":"ContainerDied","Data":"daafa7af152b68a2b09a0defd7b06e1ae6b4c30183df9cd9cabf494fe1100425"}
Oct 11 05:19:19 crc kubenswrapper[4651]: I1011 05:19:19.695077 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="daafa7af152b68a2b09a0defd7b06e1ae6b4c30183df9cd9cabf494fe1100425"
Oct 11 05:19:19 crc kubenswrapper[4651]: I1011 05:19:19.694749 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jq2ff"
Oct 11 05:19:19 crc kubenswrapper[4651]: I1011 05:19:19.799997 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x9lqn"]
Oct 11 05:19:19 crc kubenswrapper[4651]: E1011 05:19:19.800498 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6634acd3-8550-4286-ad94-004cfe4c7def" containerName="configure-network-edpm-deployment-openstack-edpm-ipam"
Oct 11 05:19:19 crc kubenswrapper[4651]: I1011 05:19:19.800521 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="6634acd3-8550-4286-ad94-004cfe4c7def" containerName="configure-network-edpm-deployment-openstack-edpm-ipam"
Oct 11 05:19:19 crc kubenswrapper[4651]: I1011 05:19:19.800794 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="6634acd3-8550-4286-ad94-004cfe4c7def" containerName="configure-network-edpm-deployment-openstack-edpm-ipam"
Oct 11 05:19:19 crc kubenswrapper[4651]: I1011 05:19:19.801628 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x9lqn"
Oct 11 05:19:19 crc kubenswrapper[4651]: I1011 05:19:19.804427 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 11 05:19:19 crc kubenswrapper[4651]: I1011 05:19:19.804731 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Oct 11 05:19:19 crc kubenswrapper[4651]: I1011 05:19:19.804775 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Oct 11 05:19:19 crc kubenswrapper[4651]: I1011 05:19:19.805318 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r489p"
Oct 11 05:19:19 crc kubenswrapper[4651]: I1011 05:19:19.813723 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x9lqn"]
Oct 11 05:19:19 crc kubenswrapper[4651]: I1011 05:19:19.965045 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1187c352-70c0-4b8f-a7fa-300e4093c60c-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-x9lqn\" (UID: \"1187c352-70c0-4b8f-a7fa-300e4093c60c\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x9lqn"
Oct 11 05:19:19 crc kubenswrapper[4651]: I1011 05:19:19.965859 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mcmh7\" (UniqueName: \"kubernetes.io/projected/1187c352-70c0-4b8f-a7fa-300e4093c60c-kube-api-access-mcmh7\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-x9lqn\" (UID: \"1187c352-70c0-4b8f-a7fa-300e4093c60c\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x9lqn"
Oct 11 05:19:19 crc kubenswrapper[4651]: I1011 05:19:19.965926 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1187c352-70c0-4b8f-a7fa-300e4093c60c-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-x9lqn\" (UID: \"1187c352-70c0-4b8f-a7fa-300e4093c60c\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x9lqn"
Oct 11 05:19:20 crc kubenswrapper[4651]: I1011 05:19:20.067968 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mcmh7\" (UniqueName: \"kubernetes.io/projected/1187c352-70c0-4b8f-a7fa-300e4093c60c-kube-api-access-mcmh7\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-x9lqn\" (UID: \"1187c352-70c0-4b8f-a7fa-300e4093c60c\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x9lqn"
Oct 11 05:19:20 crc kubenswrapper[4651]: I1011 05:19:20.068026 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1187c352-70c0-4b8f-a7fa-300e4093c60c-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-x9lqn\" (UID: \"1187c352-70c0-4b8f-a7fa-300e4093c60c\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x9lqn"
Oct 11 05:19:20 crc kubenswrapper[4651]: I1011 05:19:20.068085 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1187c352-70c0-4b8f-a7fa-300e4093c60c-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-x9lqn\" (UID: \"1187c352-70c0-4b8f-a7fa-300e4093c60c\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x9lqn"
Oct 11 05:19:20 crc kubenswrapper[4651]: I1011 05:19:20.074067 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1187c352-70c0-4b8f-a7fa-300e4093c60c-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-x9lqn\" (UID: \"1187c352-70c0-4b8f-a7fa-300e4093c60c\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x9lqn"
Oct 11 05:19:20 crc kubenswrapper[4651]: I1011 05:19:20.079990 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1187c352-70c0-4b8f-a7fa-300e4093c60c-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-x9lqn\" (UID: \"1187c352-70c0-4b8f-a7fa-300e4093c60c\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x9lqn"
Oct 11 05:19:20 crc kubenswrapper[4651]: I1011 05:19:20.094027 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mcmh7\" (UniqueName: \"kubernetes.io/projected/1187c352-70c0-4b8f-a7fa-300e4093c60c-kube-api-access-mcmh7\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-x9lqn\" (UID: \"1187c352-70c0-4b8f-a7fa-300e4093c60c\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x9lqn"
Oct 11 05:19:20 crc kubenswrapper[4651]: I1011 05:19:20.125018 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x9lqn"
Oct 11 05:19:20 crc kubenswrapper[4651]: I1011 05:19:20.639924 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x9lqn"]
Oct 11 05:19:20 crc kubenswrapper[4651]: I1011 05:19:20.702917 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x9lqn" event={"ID":"1187c352-70c0-4b8f-a7fa-300e4093c60c","Type":"ContainerStarted","Data":"efa3ba818c11ba3cee53b45a599be77619d32f6afcd52322b572fdbd001c2dd8"}
Oct 11 05:19:21 crc kubenswrapper[4651]: I1011 05:19:21.038152 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-p2nff"]
Oct 11 05:19:21 crc kubenswrapper[4651]: I1011 05:19:21.045047 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-p2nff"]
Oct 11 05:19:21 crc kubenswrapper[4651]: I1011 05:19:21.723652 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x9lqn" event={"ID":"1187c352-70c0-4b8f-a7fa-300e4093c60c","Type":"ContainerStarted","Data":"9e0e894495a6242bbd33613d813136fed85a34290b2a186168ceb4a1e1d08bdf"}
Oct 11 05:19:21 crc kubenswrapper[4651]: I1011 05:19:21.748378 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x9lqn" podStartSLOduration=2.217044809 podStartE2EDuration="2.748357624s" podCreationTimestamp="2025-10-11 05:19:19 +0000 UTC" firstStartedPulling="2025-10-11 05:19:20.678909572 +0000 UTC m=+1681.575142368" lastFinishedPulling="2025-10-11 05:19:21.210222337 +0000 UTC m=+1682.106455183" observedRunningTime="2025-10-11 05:19:21.743269405 +0000 UTC m=+1682.639502191" watchObservedRunningTime="2025-10-11 05:19:21.748357624 +0000 UTC m=+1682.644590420"
Oct 11 05:19:21 crc kubenswrapper[4651]: I1011 05:19:21.881107 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb7ede0-6fea-4867-941e-13a4c5637543" path="/var/lib/kubelet/pods/3cb7ede0-6fea-4867-941e-13a4c5637543/volumes"
Oct 11 05:19:22 crc kubenswrapper[4651]: I1011 05:19:22.030431 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-stlvp"]
Oct 11 05:19:22 crc kubenswrapper[4651]: I1011 05:19:22.039324 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-vjf9n"]
Oct 11 05:19:22 crc kubenswrapper[4651]: I1011 05:19:22.048123 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-stlvp"]
Oct 11 05:19:22 crc kubenswrapper[4651]: I1011 05:19:22.055129 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-vjf9n"]
Oct 11 05:19:23 crc kubenswrapper[4651]: I1011 05:19:23.870521 4651 scope.go:117] "RemoveContainer" containerID="9e22517d26caf83c6f2aa3910c4d04d694ebd6ca37f48f88ab2ba796ba35e6e7"
Oct 11 05:19:23 crc kubenswrapper[4651]: E1011 05:19:23.871349 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1"
Oct 11 05:19:23 crc kubenswrapper[4651]: I1011 05:19:23.892100 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bbe8f4f-c85e-46aa-a214-e28fad1722dc" path="/var/lib/kubelet/pods/1bbe8f4f-c85e-46aa-a214-e28fad1722dc/volumes"
Oct 11 05:19:23 crc kubenswrapper[4651]: I1011 05:19:23.893679 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b608bb15-f67e-4192-b820-29752a3cd443" path="/var/lib/kubelet/pods/b608bb15-f67e-4192-b820-29752a3cd443/volumes"
Oct 11 05:19:26 crc kubenswrapper[4651]: I1011 05:19:26.781575 4651 generic.go:334] "Generic (PLEG): container finished" podID="1187c352-70c0-4b8f-a7fa-300e4093c60c" containerID="9e0e894495a6242bbd33613d813136fed85a34290b2a186168ceb4a1e1d08bdf" exitCode=0
Oct 11 05:19:26 crc kubenswrapper[4651]: I1011 05:19:26.781673 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x9lqn" event={"ID":"1187c352-70c0-4b8f-a7fa-300e4093c60c","Type":"ContainerDied","Data":"9e0e894495a6242bbd33613d813136fed85a34290b2a186168ceb4a1e1d08bdf"}
Oct 11 05:19:28 crc kubenswrapper[4651]: I1011 05:19:28.227889 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x9lqn"
Oct 11 05:19:28 crc kubenswrapper[4651]: I1011 05:19:28.347181 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1187c352-70c0-4b8f-a7fa-300e4093c60c-ssh-key\") pod \"1187c352-70c0-4b8f-a7fa-300e4093c60c\" (UID: \"1187c352-70c0-4b8f-a7fa-300e4093c60c\") "
Oct 11 05:19:28 crc kubenswrapper[4651]: I1011 05:19:28.347271 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1187c352-70c0-4b8f-a7fa-300e4093c60c-inventory\") pod \"1187c352-70c0-4b8f-a7fa-300e4093c60c\" (UID: \"1187c352-70c0-4b8f-a7fa-300e4093c60c\") "
Oct 11 05:19:28 crc kubenswrapper[4651]: I1011 05:19:28.347512 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mcmh7\" (UniqueName: \"kubernetes.io/projected/1187c352-70c0-4b8f-a7fa-300e4093c60c-kube-api-access-mcmh7\") pod \"1187c352-70c0-4b8f-a7fa-300e4093c60c\" (UID: \"1187c352-70c0-4b8f-a7fa-300e4093c60c\") "
Oct 11 05:19:28 crc kubenswrapper[4651]: I1011 05:19:28.354553 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1187c352-70c0-4b8f-a7fa-300e4093c60c-kube-api-access-mcmh7" (OuterVolumeSpecName: "kube-api-access-mcmh7") pod "1187c352-70c0-4b8f-a7fa-300e4093c60c" (UID: "1187c352-70c0-4b8f-a7fa-300e4093c60c"). InnerVolumeSpecName "kube-api-access-mcmh7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:19:28 crc kubenswrapper[4651]: I1011 05:19:28.375921 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1187c352-70c0-4b8f-a7fa-300e4093c60c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "1187c352-70c0-4b8f-a7fa-300e4093c60c" (UID: "1187c352-70c0-4b8f-a7fa-300e4093c60c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:19:28 crc kubenswrapper[4651]: I1011 05:19:28.398549 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1187c352-70c0-4b8f-a7fa-300e4093c60c-inventory" (OuterVolumeSpecName: "inventory") pod "1187c352-70c0-4b8f-a7fa-300e4093c60c" (UID: "1187c352-70c0-4b8f-a7fa-300e4093c60c"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:19:28 crc kubenswrapper[4651]: I1011 05:19:28.449618 4651 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1187c352-70c0-4b8f-a7fa-300e4093c60c-ssh-key\") on node \"crc\" DevicePath \"\""
Oct 11 05:19:28 crc kubenswrapper[4651]: I1011 05:19:28.449664 4651 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1187c352-70c0-4b8f-a7fa-300e4093c60c-inventory\") on node \"crc\" DevicePath \"\""
Oct 11 05:19:28 crc kubenswrapper[4651]: I1011 05:19:28.449682 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mcmh7\" (UniqueName: \"kubernetes.io/projected/1187c352-70c0-4b8f-a7fa-300e4093c60c-kube-api-access-mcmh7\") on node \"crc\" DevicePath \"\""
Oct 11 05:19:28 crc kubenswrapper[4651]: I1011 05:19:28.805308 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x9lqn" event={"ID":"1187c352-70c0-4b8f-a7fa-300e4093c60c","Type":"ContainerDied","Data":"efa3ba818c11ba3cee53b45a599be77619d32f6afcd52322b572fdbd001c2dd8"}
Oct 11 05:19:28 crc kubenswrapper[4651]: I1011 05:19:28.805348 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="efa3ba818c11ba3cee53b45a599be77619d32f6afcd52322b572fdbd001c2dd8"
Oct 11 05:19:28 crc kubenswrapper[4651]: I1011 05:19:28.805355 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x9lqn"
Oct 11 05:19:28 crc kubenswrapper[4651]: I1011 05:19:28.887797 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-bbbdw"]
Oct 11 05:19:28 crc kubenswrapper[4651]: E1011 05:19:28.889897 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1187c352-70c0-4b8f-a7fa-300e4093c60c" containerName="validate-network-edpm-deployment-openstack-edpm-ipam"
Oct 11 05:19:28 crc kubenswrapper[4651]: I1011 05:19:28.889926 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="1187c352-70c0-4b8f-a7fa-300e4093c60c" containerName="validate-network-edpm-deployment-openstack-edpm-ipam"
Oct 11 05:19:28 crc kubenswrapper[4651]: I1011 05:19:28.892891 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="1187c352-70c0-4b8f-a7fa-300e4093c60c" containerName="validate-network-edpm-deployment-openstack-edpm-ipam"
Oct 11 05:19:28 crc kubenswrapper[4651]: I1011 05:19:28.900133 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bbbdw"
Oct 11 05:19:28 crc kubenswrapper[4651]: I1011 05:19:28.905330 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Oct 11 05:19:28 crc kubenswrapper[4651]: I1011 05:19:28.906628 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r489p"
Oct 11 05:19:28 crc kubenswrapper[4651]: I1011 05:19:28.907323 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Oct 11 05:19:28 crc kubenswrapper[4651]: I1011 05:19:28.913103 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 11 05:19:28 crc kubenswrapper[4651]: I1011 05:19:28.928113 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-bbbdw"]
Oct 11 05:19:29 crc kubenswrapper[4651]: I1011 05:19:29.065046 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hplhc\" (UniqueName: \"kubernetes.io/projected/b6609904-4dac-496c-b95b-583873422810-kube-api-access-hplhc\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-bbbdw\" (UID: \"b6609904-4dac-496c-b95b-583873422810\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bbbdw"
Oct 11 05:19:29 crc kubenswrapper[4651]: I1011 05:19:29.065288 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b6609904-4dac-496c-b95b-583873422810-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-bbbdw\" (UID: \"b6609904-4dac-496c-b95b-583873422810\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bbbdw"
Oct 11 05:19:29 crc kubenswrapper[4651]: I1011 05:19:29.065339 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b6609904-4dac-496c-b95b-583873422810-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-bbbdw\" (UID: \"b6609904-4dac-496c-b95b-583873422810\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bbbdw"
Oct 11 05:19:29 crc kubenswrapper[4651]: I1011 05:19:29.167305 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b6609904-4dac-496c-b95b-583873422810-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-bbbdw\" (UID: \"b6609904-4dac-496c-b95b-583873422810\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bbbdw"
Oct 11 05:19:29 crc kubenswrapper[4651]: I1011 05:19:29.167365 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b6609904-4dac-496c-b95b-583873422810-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-bbbdw\" (UID: \"b6609904-4dac-496c-b95b-583873422810\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bbbdw"
Oct 11 05:19:29 crc kubenswrapper[4651]: I1011 05:19:29.167408 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hplhc\" (UniqueName: \"kubernetes.io/projected/b6609904-4dac-496c-b95b-583873422810-kube-api-access-hplhc\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-bbbdw\" (UID: \"b6609904-4dac-496c-b95b-583873422810\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bbbdw"
Oct 11 05:19:29 crc kubenswrapper[4651]: I1011 05:19:29.173573 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b6609904-4dac-496c-b95b-583873422810-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-bbbdw\" (UID: \"b6609904-4dac-496c-b95b-583873422810\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bbbdw"
Oct 11 05:19:29 crc kubenswrapper[4651]: I1011 05:19:29.181775 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b6609904-4dac-496c-b95b-583873422810-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-bbbdw\" (UID: \"b6609904-4dac-496c-b95b-583873422810\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bbbdw"
Oct 11 05:19:29 crc kubenswrapper[4651]: I1011 05:19:29.190147 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hplhc\" (UniqueName: \"kubernetes.io/projected/b6609904-4dac-496c-b95b-583873422810-kube-api-access-hplhc\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-bbbdw\" (UID: \"b6609904-4dac-496c-b95b-583873422810\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bbbdw"
Oct 11 05:19:29 crc kubenswrapper[4651]: I1011 05:19:29.228181 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bbbdw"
Oct 11 05:19:29 crc kubenswrapper[4651]: I1011 05:19:29.757055 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-bbbdw"]
Oct 11 05:19:29 crc kubenswrapper[4651]: I1011 05:19:29.816210 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bbbdw" event={"ID":"b6609904-4dac-496c-b95b-583873422810","Type":"ContainerStarted","Data":"f39865773bb06419b81a1475ca80736ecaed5d184185ae8ca642689120f5c81e"}
Oct 11 05:19:30 crc kubenswrapper[4651]: I1011 05:19:30.828430 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bbbdw" event={"ID":"b6609904-4dac-496c-b95b-583873422810","Type":"ContainerStarted","Data":"e972a7ac32c70797b3443b345a0cc8c762ed1dcbccc1766e99b595f65e52463a"}
Oct 11 05:19:30 crc kubenswrapper[4651]: I1011 05:19:30.851913 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bbbdw" podStartSLOduration=2.457233318 podStartE2EDuration="2.851894997s" podCreationTimestamp="2025-10-11 05:19:28 +0000 UTC" firstStartedPulling="2025-10-11 05:19:29.767097009 +0000 UTC m=+1690.663329805" lastFinishedPulling="2025-10-11 05:19:30.161758688 +0000 UTC m=+1691.057991484" observedRunningTime="2025-10-11 05:19:30.846516551 +0000 UTC m=+1691.742749407" watchObservedRunningTime="2025-10-11 05:19:30.851894997 +0000 UTC m=+1691.748127793"
Oct 11 05:19:31 crc kubenswrapper[4651]: I1011 05:19:31.034503 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-b783-account-create-z7jwg"]
Oct 11 05:19:31 crc kubenswrapper[4651]: I1011 05:19:31.043920 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-6859-account-create-xzc42"]
Oct 11 05:19:31 crc kubenswrapper[4651]: I1011 05:19:31.055002 4651 kubelet.go:2431] "SyncLoop REMOVE"
source="api" pods=["openstack/nova-cell0-b783-account-create-z7jwg"] Oct 11 05:19:31 crc kubenswrapper[4651]: I1011 05:19:31.061925 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-6859-account-create-xzc42"] Oct 11 05:19:31 crc kubenswrapper[4651]: I1011 05:19:31.887123 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7edf14d6-b944-4477-8803-9536566e63ba" path="/var/lib/kubelet/pods/7edf14d6-b944-4477-8803-9536566e63ba/volumes" Oct 11 05:19:31 crc kubenswrapper[4651]: I1011 05:19:31.888331 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd2c676b-5ffe-4117-bd68-481ab14bcfb6" path="/var/lib/kubelet/pods/dd2c676b-5ffe-4117-bd68-481ab14bcfb6/volumes" Oct 11 05:19:32 crc kubenswrapper[4651]: I1011 05:19:32.030135 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-5fc8-account-create-jm72j"] Oct 11 05:19:32 crc kubenswrapper[4651]: I1011 05:19:32.037765 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-5fc8-account-create-jm72j"] Oct 11 05:19:33 crc kubenswrapper[4651]: I1011 05:19:33.879024 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="949f137e-f9eb-43e5-b1bc-cea47b356ee6" path="/var/lib/kubelet/pods/949f137e-f9eb-43e5-b1bc-cea47b356ee6/volumes" Oct 11 05:19:33 crc kubenswrapper[4651]: I1011 05:19:33.985969 4651 scope.go:117] "RemoveContainer" containerID="edee163ecd42935dd7592d5e2c2dcd46840fd077fc95d7b36b0184ae68e6eae3" Oct 11 05:19:34 crc kubenswrapper[4651]: I1011 05:19:34.009628 4651 scope.go:117] "RemoveContainer" containerID="b3b5369c0a55289cfcc8db9cbad784bee96a7dc94e001fb91bd38773edb31c66" Oct 11 05:19:34 crc kubenswrapper[4651]: I1011 05:19:34.052216 4651 scope.go:117] "RemoveContainer" containerID="7ee629052e142cfd4778cbf4575b9ee3e1b2d3ac56c7f8a3791f76059068b59a" Oct 11 05:19:34 crc kubenswrapper[4651]: I1011 05:19:34.091980 4651 scope.go:117] "RemoveContainer" containerID="055f0a58a9b908f88f1f6c166107ddd0debed02cd259082260e4f4c97f8a9c88" Oct 11 05:19:34 crc kubenswrapper[4651]: I1011 05:19:34.139987 4651 scope.go:117] "RemoveContainer" containerID="43763f5ef3d329b5e4c0b696844afe6d616f7ce3113c771c99213e83481c19c2" Oct 11 05:19:34 crc kubenswrapper[4651]: I1011 05:19:34.195072 4651 scope.go:117] "RemoveContainer" containerID="97eb21467ac13a278c5276c9ef33b51b684f94f0ddbeea3ea84e7606baaf3708" Oct 11 05:19:34 crc kubenswrapper[4651]: I1011 05:19:34.242988 4651 scope.go:117] "RemoveContainer" containerID="33a245491c814be790ad16a8c1d5a6a92c943dbd5caeca8dbd7d3ebf0b9bd67a" Oct 11 05:19:37 crc kubenswrapper[4651]: I1011 05:19:37.870497 4651 scope.go:117] "RemoveContainer" containerID="9e22517d26caf83c6f2aa3910c4d04d694ebd6ca37f48f88ab2ba796ba35e6e7" Oct 11 05:19:37 crc kubenswrapper[4651]: E1011 05:19:37.871677 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:19:48 crc kubenswrapper[4651]: I1011 05:19:48.870421 4651 scope.go:117] "RemoveContainer" containerID="9e22517d26caf83c6f2aa3910c4d04d694ebd6ca37f48f88ab2ba796ba35e6e7" Oct 11 05:19:48 crc kubenswrapper[4651]: E1011 05:19:48.871308 4651 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:19:54 crc kubenswrapper[4651]: I1011 05:19:54.059286 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-464ng"] Oct 11 05:19:54 crc kubenswrapper[4651]: I1011 05:19:54.078650 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-464ng"] Oct 11 05:19:55 crc kubenswrapper[4651]: I1011 05:19:55.888418 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="899b7e7f-0b2f-48f9-8b7e-c86614b06f41" path="/var/lib/kubelet/pods/899b7e7f-0b2f-48f9-8b7e-c86614b06f41/volumes" Oct 11 05:20:03 crc kubenswrapper[4651]: I1011 05:20:03.870216 4651 scope.go:117] "RemoveContainer" containerID="9e22517d26caf83c6f2aa3910c4d04d694ebd6ca37f48f88ab2ba796ba35e6e7" Oct 11 05:20:03 crc kubenswrapper[4651]: E1011 05:20:03.871616 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:20:09 crc kubenswrapper[4651]: I1011 05:20:09.220958 4651 generic.go:334] "Generic (PLEG): container finished" podID="b6609904-4dac-496c-b95b-583873422810" containerID="e972a7ac32c70797b3443b345a0cc8c762ed1dcbccc1766e99b595f65e52463a" exitCode=0 Oct 11 05:20:09 crc kubenswrapper[4651]: I1011 05:20:09.221640 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bbbdw" event={"ID":"b6609904-4dac-496c-b95b-583873422810","Type":"ContainerDied","Data":"e972a7ac32c70797b3443b345a0cc8c762ed1dcbccc1766e99b595f65e52463a"} Oct 11 05:20:10 crc kubenswrapper[4651]: I1011 05:20:10.657563 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bbbdw" Oct 11 05:20:10 crc kubenswrapper[4651]: I1011 05:20:10.829930 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b6609904-4dac-496c-b95b-583873422810-ssh-key\") pod \"b6609904-4dac-496c-b95b-583873422810\" (UID: \"b6609904-4dac-496c-b95b-583873422810\") " Oct 11 05:20:10 crc kubenswrapper[4651]: I1011 05:20:10.830141 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hplhc\" (UniqueName: \"kubernetes.io/projected/b6609904-4dac-496c-b95b-583873422810-kube-api-access-hplhc\") pod \"b6609904-4dac-496c-b95b-583873422810\" (UID: \"b6609904-4dac-496c-b95b-583873422810\") " Oct 11 05:20:10 crc kubenswrapper[4651]: I1011 05:20:10.830200 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b6609904-4dac-496c-b95b-583873422810-inventory\") pod \"b6609904-4dac-496c-b95b-583873422810\" (UID: \"b6609904-4dac-496c-b95b-583873422810\") " Oct 11 05:20:10 crc kubenswrapper[4651]: I1011 05:20:10.838272 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6609904-4dac-496c-b95b-583873422810-kube-api-access-hplhc" (OuterVolumeSpecName: "kube-api-access-hplhc") pod "b6609904-4dac-496c-b95b-583873422810" (UID: "b6609904-4dac-496c-b95b-583873422810"). InnerVolumeSpecName "kube-api-access-hplhc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:20:10 crc kubenswrapper[4651]: I1011 05:20:10.866006 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6609904-4dac-496c-b95b-583873422810-inventory" (OuterVolumeSpecName: "inventory") pod "b6609904-4dac-496c-b95b-583873422810" (UID: "b6609904-4dac-496c-b95b-583873422810"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:20:10 crc kubenswrapper[4651]: I1011 05:20:10.893119 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6609904-4dac-496c-b95b-583873422810-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b6609904-4dac-496c-b95b-583873422810" (UID: "b6609904-4dac-496c-b95b-583873422810"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:20:10 crc kubenswrapper[4651]: I1011 05:20:10.933456 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hplhc\" (UniqueName: \"kubernetes.io/projected/b6609904-4dac-496c-b95b-583873422810-kube-api-access-hplhc\") on node \"crc\" DevicePath \"\"" Oct 11 05:20:10 crc kubenswrapper[4651]: I1011 05:20:10.933581 4651 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b6609904-4dac-496c-b95b-583873422810-inventory\") on node \"crc\" DevicePath \"\"" Oct 11 05:20:10 crc kubenswrapper[4651]: I1011 05:20:10.933673 4651 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b6609904-4dac-496c-b95b-583873422810-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 11 05:20:11 crc kubenswrapper[4651]: I1011 05:20:11.247260 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bbbdw" event={"ID":"b6609904-4dac-496c-b95b-583873422810","Type":"ContainerDied","Data":"f39865773bb06419b81a1475ca80736ecaed5d184185ae8ca642689120f5c81e"} Oct 11 05:20:11 crc kubenswrapper[4651]: I1011 05:20:11.247335 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f39865773bb06419b81a1475ca80736ecaed5d184185ae8ca642689120f5c81e" Oct 11 05:20:11 crc kubenswrapper[4651]: I1011 05:20:11.247286 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bbbdw" Oct 11 05:20:11 crc kubenswrapper[4651]: I1011 05:20:11.344612 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-5wgh8"] Oct 11 05:20:11 crc kubenswrapper[4651]: E1011 05:20:11.345103 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6609904-4dac-496c-b95b-583873422810" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 11 05:20:11 crc kubenswrapper[4651]: I1011 05:20:11.345120 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6609904-4dac-496c-b95b-583873422810" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 11 05:20:11 crc kubenswrapper[4651]: I1011 05:20:11.345406 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6609904-4dac-496c-b95b-583873422810" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 11 05:20:11 crc kubenswrapper[4651]: I1011 05:20:11.346141 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-5wgh8" Oct 11 05:20:11 crc kubenswrapper[4651]: I1011 05:20:11.349166 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 11 05:20:11 crc kubenswrapper[4651]: I1011 05:20:11.349197 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 11 05:20:11 crc kubenswrapper[4651]: I1011 05:20:11.349280 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 11 05:20:11 crc kubenswrapper[4651]: I1011 05:20:11.352387 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r489p" Oct 11 05:20:11 crc kubenswrapper[4651]: I1011 05:20:11.374716 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-5wgh8"] Oct 11 05:20:11 crc kubenswrapper[4651]: I1011 05:20:11.442222 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bd4b257e-2d94-4f78-9ff5-cef288fd0858-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-5wgh8\" (UID: \"bd4b257e-2d94-4f78-9ff5-cef288fd0858\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-5wgh8" Oct 11 05:20:11 crc kubenswrapper[4651]: I1011 05:20:11.442295 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxm2k\" (UniqueName: \"kubernetes.io/projected/bd4b257e-2d94-4f78-9ff5-cef288fd0858-kube-api-access-wxm2k\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-5wgh8\" (UID: \"bd4b257e-2d94-4f78-9ff5-cef288fd0858\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-5wgh8" Oct 11 05:20:11 crc kubenswrapper[4651]: I1011 05:20:11.442333 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bd4b257e-2d94-4f78-9ff5-cef288fd0858-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-5wgh8\" (UID: \"bd4b257e-2d94-4f78-9ff5-cef288fd0858\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-5wgh8" Oct 11 05:20:11 crc kubenswrapper[4651]: I1011 05:20:11.544596 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bd4b257e-2d94-4f78-9ff5-cef288fd0858-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-5wgh8\" (UID: \"bd4b257e-2d94-4f78-9ff5-cef288fd0858\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-5wgh8" Oct 11 05:20:11 crc kubenswrapper[4651]: I1011 05:20:11.545013 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxm2k\" (UniqueName: \"kubernetes.io/projected/bd4b257e-2d94-4f78-9ff5-cef288fd0858-kube-api-access-wxm2k\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-5wgh8\" (UID: \"bd4b257e-2d94-4f78-9ff5-cef288fd0858\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-5wgh8" Oct 11 05:20:11 crc kubenswrapper[4651]: I1011 05:20:11.545175 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bd4b257e-2d94-4f78-9ff5-cef288fd0858-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-5wgh8\" 
(UID: \"bd4b257e-2d94-4f78-9ff5-cef288fd0858\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-5wgh8" Oct 11 05:20:11 crc kubenswrapper[4651]: I1011 05:20:11.548265 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bd4b257e-2d94-4f78-9ff5-cef288fd0858-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-5wgh8\" (UID: \"bd4b257e-2d94-4f78-9ff5-cef288fd0858\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-5wgh8" Oct 11 05:20:11 crc kubenswrapper[4651]: I1011 05:20:11.548352 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bd4b257e-2d94-4f78-9ff5-cef288fd0858-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-5wgh8\" (UID: \"bd4b257e-2d94-4f78-9ff5-cef288fd0858\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-5wgh8" Oct 11 05:20:11 crc kubenswrapper[4651]: I1011 05:20:11.581337 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxm2k\" (UniqueName: \"kubernetes.io/projected/bd4b257e-2d94-4f78-9ff5-cef288fd0858-kube-api-access-wxm2k\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-5wgh8\" (UID: \"bd4b257e-2d94-4f78-9ff5-cef288fd0858\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-5wgh8" Oct 11 05:20:11 crc kubenswrapper[4651]: I1011 05:20:11.664816 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-5wgh8" Oct 11 05:20:12 crc kubenswrapper[4651]: I1011 05:20:12.238650 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-5wgh8"] Oct 11 05:20:12 crc kubenswrapper[4651]: I1011 05:20:12.260188 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-5wgh8" event={"ID":"bd4b257e-2d94-4f78-9ff5-cef288fd0858","Type":"ContainerStarted","Data":"b2d8678bb8424eca1796dbe9831d969f934d4717ccb5917a5db030486e56acee"} Oct 11 05:20:13 crc kubenswrapper[4651]: I1011 05:20:13.272029 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-5wgh8" event={"ID":"bd4b257e-2d94-4f78-9ff5-cef288fd0858","Type":"ContainerStarted","Data":"d37bb95ec5244c1e78e258e15cfc8233591ce2c1059875d83a0b85de16ba2e10"} Oct 11 05:20:13 crc kubenswrapper[4651]: I1011 05:20:13.300188 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-5wgh8" podStartSLOduration=1.857754772 podStartE2EDuration="2.300156298s" podCreationTimestamp="2025-10-11 05:20:11 +0000 UTC" firstStartedPulling="2025-10-11 05:20:12.246703491 +0000 UTC m=+1733.142936297" lastFinishedPulling="2025-10-11 05:20:12.689105017 +0000 UTC m=+1733.585337823" observedRunningTime="2025-10-11 05:20:13.290493334 +0000 UTC m=+1734.186726130" watchObservedRunningTime="2025-10-11 05:20:13.300156298 +0000 UTC m=+1734.196389134" Oct 11 05:20:17 crc kubenswrapper[4651]: I1011 05:20:17.054184 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-r47rs"] Oct 11 05:20:17 crc kubenswrapper[4651]: I1011 05:20:17.060351 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-prppv"] Oct 11 05:20:17 crc kubenswrapper[4651]: I1011 05:20:17.068180 4651 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-prppv"] Oct 11 05:20:17 crc kubenswrapper[4651]: I1011 05:20:17.077082 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-r47rs"] Oct 11 05:20:17 crc kubenswrapper[4651]: I1011 05:20:17.871873 4651 scope.go:117] "RemoveContainer" containerID="9e22517d26caf83c6f2aa3910c4d04d694ebd6ca37f48f88ab2ba796ba35e6e7" Oct 11 05:20:17 crc kubenswrapper[4651]: E1011 05:20:17.872730 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:20:17 crc kubenswrapper[4651]: I1011 05:20:17.894357 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0a3cd96c-5270-46d9-befe-b18a3467ddde" path="/var/lib/kubelet/pods/0a3cd96c-5270-46d9-befe-b18a3467ddde/volumes" Oct 11 05:20:17 crc kubenswrapper[4651]: I1011 05:20:17.896149 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a6ed193-5ad3-4f79-aa3d-d35033be1f21" path="/var/lib/kubelet/pods/5a6ed193-5ad3-4f79-aa3d-d35033be1f21/volumes" Oct 11 05:20:31 crc kubenswrapper[4651]: I1011 05:20:31.869806 4651 scope.go:117] "RemoveContainer" containerID="9e22517d26caf83c6f2aa3910c4d04d694ebd6ca37f48f88ab2ba796ba35e6e7" Oct 11 05:20:31 crc kubenswrapper[4651]: E1011 05:20:31.871159 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:20:34 crc kubenswrapper[4651]: I1011 05:20:34.388952 4651 scope.go:117] "RemoveContainer" containerID="d4e0a300f02f03e5a9ba3d36581d964fef9f440b4fc093ec7fbcdabc318650c9" Oct 11 05:20:34 crc kubenswrapper[4651]: I1011 05:20:34.431418 4651 scope.go:117] "RemoveContainer" containerID="c7c739f99e77a0286e117cc409a94b977ca85e1f2f58b286780045ba328fa488" Oct 11 05:20:34 crc kubenswrapper[4651]: I1011 05:20:34.493872 4651 scope.go:117] "RemoveContainer" containerID="f0d5dd42154f318918654257dcf6c3f9e7191592a7221086692304cf8696c65e" Oct 11 05:20:43 crc kubenswrapper[4651]: I1011 05:20:43.870175 4651 scope.go:117] "RemoveContainer" containerID="9e22517d26caf83c6f2aa3910c4d04d694ebd6ca37f48f88ab2ba796ba35e6e7" Oct 11 05:20:43 crc kubenswrapper[4651]: E1011 05:20:43.871061 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:20:56 crc kubenswrapper[4651]: I1011 05:20:56.870088 4651 scope.go:117] "RemoveContainer" containerID="9e22517d26caf83c6f2aa3910c4d04d694ebd6ca37f48f88ab2ba796ba35e6e7" Oct 11 05:20:56 crc kubenswrapper[4651]: E1011 
05:20:56.871699 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:21:01 crc kubenswrapper[4651]: I1011 05:21:01.042055 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-9mvdn"] Oct 11 05:21:01 crc kubenswrapper[4651]: I1011 05:21:01.052775 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-9mvdn"] Oct 11 05:21:01 crc kubenswrapper[4651]: I1011 05:21:01.881279 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ea769ce-f3c9-481f-9544-0d096ba2d0c8" path="/var/lib/kubelet/pods/8ea769ce-f3c9-481f-9544-0d096ba2d0c8/volumes" Oct 11 05:21:08 crc kubenswrapper[4651]: I1011 05:21:08.869951 4651 scope.go:117] "RemoveContainer" containerID="9e22517d26caf83c6f2aa3910c4d04d694ebd6ca37f48f88ab2ba796ba35e6e7" Oct 11 05:21:08 crc kubenswrapper[4651]: E1011 05:21:08.870527 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:21:11 crc kubenswrapper[4651]: I1011 05:21:11.965241 4651 generic.go:334] "Generic (PLEG): container finished" podID="bd4b257e-2d94-4f78-9ff5-cef288fd0858" containerID="d37bb95ec5244c1e78e258e15cfc8233591ce2c1059875d83a0b85de16ba2e10" exitCode=2 Oct 11 05:21:11 crc kubenswrapper[4651]: I1011 05:21:11.965337 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-5wgh8" event={"ID":"bd4b257e-2d94-4f78-9ff5-cef288fd0858","Type":"ContainerDied","Data":"d37bb95ec5244c1e78e258e15cfc8233591ce2c1059875d83a0b85de16ba2e10"} Oct 11 05:21:13 crc kubenswrapper[4651]: I1011 05:21:13.515265 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-5wgh8" Oct 11 05:21:13 crc kubenswrapper[4651]: I1011 05:21:13.587239 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxm2k\" (UniqueName: \"kubernetes.io/projected/bd4b257e-2d94-4f78-9ff5-cef288fd0858-kube-api-access-wxm2k\") pod \"bd4b257e-2d94-4f78-9ff5-cef288fd0858\" (UID: \"bd4b257e-2d94-4f78-9ff5-cef288fd0858\") " Oct 11 05:21:13 crc kubenswrapper[4651]: I1011 05:21:13.587346 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bd4b257e-2d94-4f78-9ff5-cef288fd0858-ssh-key\") pod \"bd4b257e-2d94-4f78-9ff5-cef288fd0858\" (UID: \"bd4b257e-2d94-4f78-9ff5-cef288fd0858\") " Oct 11 05:21:13 crc kubenswrapper[4651]: I1011 05:21:13.587501 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bd4b257e-2d94-4f78-9ff5-cef288fd0858-inventory\") pod \"bd4b257e-2d94-4f78-9ff5-cef288fd0858\" (UID: \"bd4b257e-2d94-4f78-9ff5-cef288fd0858\") " Oct 11 05:21:13 crc kubenswrapper[4651]: I1011 05:21:13.599470 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd4b257e-2d94-4f78-9ff5-cef288fd0858-kube-api-access-wxm2k" (OuterVolumeSpecName: "kube-api-access-wxm2k") pod "bd4b257e-2d94-4f78-9ff5-cef288fd0858" (UID: "bd4b257e-2d94-4f78-9ff5-cef288fd0858"). InnerVolumeSpecName "kube-api-access-wxm2k". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:21:13 crc kubenswrapper[4651]: I1011 05:21:13.623308 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd4b257e-2d94-4f78-9ff5-cef288fd0858-inventory" (OuterVolumeSpecName: "inventory") pod "bd4b257e-2d94-4f78-9ff5-cef288fd0858" (UID: "bd4b257e-2d94-4f78-9ff5-cef288fd0858"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:21:13 crc kubenswrapper[4651]: I1011 05:21:13.631069 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd4b257e-2d94-4f78-9ff5-cef288fd0858-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "bd4b257e-2d94-4f78-9ff5-cef288fd0858" (UID: "bd4b257e-2d94-4f78-9ff5-cef288fd0858"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:21:13 crc kubenswrapper[4651]: I1011 05:21:13.690197 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxm2k\" (UniqueName: \"kubernetes.io/projected/bd4b257e-2d94-4f78-9ff5-cef288fd0858-kube-api-access-wxm2k\") on node \"crc\" DevicePath \"\"" Oct 11 05:21:13 crc kubenswrapper[4651]: I1011 05:21:13.690233 4651 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bd4b257e-2d94-4f78-9ff5-cef288fd0858-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 11 05:21:13 crc kubenswrapper[4651]: I1011 05:21:13.690242 4651 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bd4b257e-2d94-4f78-9ff5-cef288fd0858-inventory\") on node \"crc\" DevicePath \"\"" Oct 11 05:21:13 crc kubenswrapper[4651]: I1011 05:21:13.991312 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-5wgh8" event={"ID":"bd4b257e-2d94-4f78-9ff5-cef288fd0858","Type":"ContainerDied","Data":"b2d8678bb8424eca1796dbe9831d969f934d4717ccb5917a5db030486e56acee"} Oct 11 05:21:13 crc kubenswrapper[4651]: I1011 05:21:13.991574 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b2d8678bb8424eca1796dbe9831d969f934d4717ccb5917a5db030486e56acee" Oct 11 05:21:13 crc kubenswrapper[4651]: I1011 05:21:13.991443 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-5wgh8" Oct 11 05:21:19 crc kubenswrapper[4651]: I1011 05:21:19.869695 4651 scope.go:117] "RemoveContainer" containerID="9e22517d26caf83c6f2aa3910c4d04d694ebd6ca37f48f88ab2ba796ba35e6e7" Oct 11 05:21:19 crc kubenswrapper[4651]: E1011 05:21:19.871013 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:21:21 crc kubenswrapper[4651]: I1011 05:21:21.027750 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zjfbr"] Oct 11 05:21:21 crc kubenswrapper[4651]: E1011 05:21:21.028957 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd4b257e-2d94-4f78-9ff5-cef288fd0858" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 11 05:21:21 crc kubenswrapper[4651]: I1011 05:21:21.029064 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd4b257e-2d94-4f78-9ff5-cef288fd0858" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 11 05:21:21 crc kubenswrapper[4651]: I1011 05:21:21.029312 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd4b257e-2d94-4f78-9ff5-cef288fd0858" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 11 05:21:21 crc kubenswrapper[4651]: I1011 05:21:21.030034 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zjfbr" Oct 11 05:21:21 crc kubenswrapper[4651]: I1011 05:21:21.033942 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 11 05:21:21 crc kubenswrapper[4651]: I1011 05:21:21.034216 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r489p" Oct 11 05:21:21 crc kubenswrapper[4651]: I1011 05:21:21.036525 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 11 05:21:21 crc kubenswrapper[4651]: I1011 05:21:21.036580 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 11 05:21:21 crc kubenswrapper[4651]: I1011 05:21:21.044545 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zjfbr"] Oct 11 05:21:21 crc kubenswrapper[4651]: I1011 05:21:21.168675 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/18c28764-b000-46b3-af99-9410c165ff04-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-zjfbr\" (UID: \"18c28764-b000-46b3-af99-9410c165ff04\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zjfbr" Oct 11 05:21:21 crc kubenswrapper[4651]: I1011 05:21:21.168771 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/18c28764-b000-46b3-af99-9410c165ff04-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-zjfbr\" (UID: \"18c28764-b000-46b3-af99-9410c165ff04\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zjfbr" Oct 11 05:21:21 crc kubenswrapper[4651]: I1011 05:21:21.168901 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wjtz7\" (UniqueName: \"kubernetes.io/projected/18c28764-b000-46b3-af99-9410c165ff04-kube-api-access-wjtz7\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-zjfbr\" (UID: \"18c28764-b000-46b3-af99-9410c165ff04\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zjfbr" Oct 11 05:21:21 crc kubenswrapper[4651]: I1011 05:21:21.271509 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/18c28764-b000-46b3-af99-9410c165ff04-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-zjfbr\" (UID: \"18c28764-b000-46b3-af99-9410c165ff04\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zjfbr" Oct 11 05:21:21 crc kubenswrapper[4651]: I1011 05:21:21.271965 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/18c28764-b000-46b3-af99-9410c165ff04-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-zjfbr\" (UID: \"18c28764-b000-46b3-af99-9410c165ff04\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zjfbr" Oct 11 05:21:21 crc kubenswrapper[4651]: I1011 05:21:21.272184 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wjtz7\" (UniqueName: \"kubernetes.io/projected/18c28764-b000-46b3-af99-9410c165ff04-kube-api-access-wjtz7\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-zjfbr\" 
(UID: \"18c28764-b000-46b3-af99-9410c165ff04\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zjfbr" Oct 11 05:21:21 crc kubenswrapper[4651]: I1011 05:21:21.283126 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/18c28764-b000-46b3-af99-9410c165ff04-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-zjfbr\" (UID: \"18c28764-b000-46b3-af99-9410c165ff04\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zjfbr" Oct 11 05:21:21 crc kubenswrapper[4651]: I1011 05:21:21.286006 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/18c28764-b000-46b3-af99-9410c165ff04-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-zjfbr\" (UID: \"18c28764-b000-46b3-af99-9410c165ff04\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zjfbr" Oct 11 05:21:21 crc kubenswrapper[4651]: I1011 05:21:21.292472 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wjtz7\" (UniqueName: \"kubernetes.io/projected/18c28764-b000-46b3-af99-9410c165ff04-kube-api-access-wjtz7\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-zjfbr\" (UID: \"18c28764-b000-46b3-af99-9410c165ff04\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zjfbr" Oct 11 05:21:21 crc kubenswrapper[4651]: I1011 05:21:21.370029 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zjfbr" Oct 11 05:21:21 crc kubenswrapper[4651]: I1011 05:21:21.788321 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zjfbr"] Oct 11 05:21:22 crc kubenswrapper[4651]: I1011 05:21:22.080529 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zjfbr" event={"ID":"18c28764-b000-46b3-af99-9410c165ff04","Type":"ContainerStarted","Data":"812aa16fded2a34f58d996d1adb8e825343e93ecdc1d8cd9ba33cba0a0fc1cda"} Oct 11 05:21:23 crc kubenswrapper[4651]: I1011 05:21:23.093776 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zjfbr" event={"ID":"18c28764-b000-46b3-af99-9410c165ff04","Type":"ContainerStarted","Data":"c1fc584d364b2a38034b45a41ae48d4b1578d678917fbcdbc91ba6286d576109"} Oct 11 05:21:23 crc kubenswrapper[4651]: I1011 05:21:23.138640 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zjfbr" podStartSLOduration=1.6263178649999999 podStartE2EDuration="2.138615159s" podCreationTimestamp="2025-10-11 05:21:21 +0000 UTC" firstStartedPulling="2025-10-11 05:21:21.802863244 +0000 UTC m=+1802.699096040" lastFinishedPulling="2025-10-11 05:21:22.315160528 +0000 UTC m=+1803.211393334" observedRunningTime="2025-10-11 05:21:23.124858917 +0000 UTC m=+1804.021091783" watchObservedRunningTime="2025-10-11 05:21:23.138615159 +0000 UTC m=+1804.034847995" Oct 11 05:21:30 crc kubenswrapper[4651]: I1011 05:21:30.869674 4651 scope.go:117] "RemoveContainer" containerID="9e22517d26caf83c6f2aa3910c4d04d694ebd6ca37f48f88ab2ba796ba35e6e7" Oct 11 05:21:30 crc kubenswrapper[4651]: E1011 05:21:30.871030 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s 
restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:21:34 crc kubenswrapper[4651]: I1011 05:21:34.608537 4651 scope.go:117] "RemoveContainer" containerID="2ff1626a641a3a945708d87d3446cb08598ac5bcf8b714ead332dd45584e7ea7" Oct 11 05:21:41 crc kubenswrapper[4651]: I1011 05:21:41.870351 4651 scope.go:117] "RemoveContainer" containerID="9e22517d26caf83c6f2aa3910c4d04d694ebd6ca37f48f88ab2ba796ba35e6e7" Oct 11 05:21:41 crc kubenswrapper[4651]: E1011 05:21:41.870958 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:21:53 crc kubenswrapper[4651]: I1011 05:21:53.870066 4651 scope.go:117] "RemoveContainer" containerID="9e22517d26caf83c6f2aa3910c4d04d694ebd6ca37f48f88ab2ba796ba35e6e7" Oct 11 05:21:53 crc kubenswrapper[4651]: E1011 05:21:53.871174 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:22:07 crc kubenswrapper[4651]: I1011 05:22:07.869680 4651 scope.go:117] "RemoveContainer" containerID="9e22517d26caf83c6f2aa3910c4d04d694ebd6ca37f48f88ab2ba796ba35e6e7" Oct 11 05:22:07 crc kubenswrapper[4651]: E1011 05:22:07.870878 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:22:11 crc kubenswrapper[4651]: I1011 05:22:11.653270 4651 generic.go:334] "Generic (PLEG): container finished" podID="18c28764-b000-46b3-af99-9410c165ff04" containerID="c1fc584d364b2a38034b45a41ae48d4b1578d678917fbcdbc91ba6286d576109" exitCode=0 Oct 11 05:22:11 crc kubenswrapper[4651]: I1011 05:22:11.653354 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zjfbr" event={"ID":"18c28764-b000-46b3-af99-9410c165ff04","Type":"ContainerDied","Data":"c1fc584d364b2a38034b45a41ae48d4b1578d678917fbcdbc91ba6286d576109"} Oct 11 05:22:13 crc kubenswrapper[4651]: I1011 05:22:13.141746 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zjfbr" Oct 11 05:22:13 crc kubenswrapper[4651]: I1011 05:22:13.156658 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wjtz7\" (UniqueName: \"kubernetes.io/projected/18c28764-b000-46b3-af99-9410c165ff04-kube-api-access-wjtz7\") pod \"18c28764-b000-46b3-af99-9410c165ff04\" (UID: \"18c28764-b000-46b3-af99-9410c165ff04\") " Oct 11 05:22:13 crc kubenswrapper[4651]: I1011 05:22:13.156725 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/18c28764-b000-46b3-af99-9410c165ff04-inventory\") pod \"18c28764-b000-46b3-af99-9410c165ff04\" (UID: \"18c28764-b000-46b3-af99-9410c165ff04\") " Oct 11 05:22:13 crc kubenswrapper[4651]: I1011 05:22:13.156775 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/18c28764-b000-46b3-af99-9410c165ff04-ssh-key\") pod \"18c28764-b000-46b3-af99-9410c165ff04\" (UID: \"18c28764-b000-46b3-af99-9410c165ff04\") " Oct 11 05:22:13 crc kubenswrapper[4651]: I1011 05:22:13.198044 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18c28764-b000-46b3-af99-9410c165ff04-kube-api-access-wjtz7" (OuterVolumeSpecName: "kube-api-access-wjtz7") pod "18c28764-b000-46b3-af99-9410c165ff04" (UID: "18c28764-b000-46b3-af99-9410c165ff04"). InnerVolumeSpecName "kube-api-access-wjtz7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:22:13 crc kubenswrapper[4651]: I1011 05:22:13.203506 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18c28764-b000-46b3-af99-9410c165ff04-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "18c28764-b000-46b3-af99-9410c165ff04" (UID: "18c28764-b000-46b3-af99-9410c165ff04"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:22:13 crc kubenswrapper[4651]: I1011 05:22:13.205051 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18c28764-b000-46b3-af99-9410c165ff04-inventory" (OuterVolumeSpecName: "inventory") pod "18c28764-b000-46b3-af99-9410c165ff04" (UID: "18c28764-b000-46b3-af99-9410c165ff04"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:22:13 crc kubenswrapper[4651]: I1011 05:22:13.260228 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wjtz7\" (UniqueName: \"kubernetes.io/projected/18c28764-b000-46b3-af99-9410c165ff04-kube-api-access-wjtz7\") on node \"crc\" DevicePath \"\"" Oct 11 05:22:13 crc kubenswrapper[4651]: I1011 05:22:13.260433 4651 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/18c28764-b000-46b3-af99-9410c165ff04-inventory\") on node \"crc\" DevicePath \"\"" Oct 11 05:22:13 crc kubenswrapper[4651]: I1011 05:22:13.260451 4651 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/18c28764-b000-46b3-af99-9410c165ff04-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 11 05:22:13 crc kubenswrapper[4651]: I1011 05:22:13.673770 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zjfbr" Oct 11 05:22:13 crc kubenswrapper[4651]: I1011 05:22:13.673667 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-zjfbr" event={"ID":"18c28764-b000-46b3-af99-9410c165ff04","Type":"ContainerDied","Data":"812aa16fded2a34f58d996d1adb8e825343e93ecdc1d8cd9ba33cba0a0fc1cda"} Oct 11 05:22:13 crc kubenswrapper[4651]: I1011 05:22:13.675994 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="812aa16fded2a34f58d996d1adb8e825343e93ecdc1d8cd9ba33cba0a0fc1cda" Oct 11 05:22:13 crc kubenswrapper[4651]: I1011 05:22:13.778268 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-7vjzq"] Oct 11 05:22:13 crc kubenswrapper[4651]: E1011 05:22:13.778771 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18c28764-b000-46b3-af99-9410c165ff04" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 11 05:22:13 crc kubenswrapper[4651]: I1011 05:22:13.778790 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="18c28764-b000-46b3-af99-9410c165ff04" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 11 05:22:13 crc kubenswrapper[4651]: I1011 05:22:13.778976 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="18c28764-b000-46b3-af99-9410c165ff04" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 11 05:22:13 crc kubenswrapper[4651]: I1011 05:22:13.779581 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-7vjzq" Oct 11 05:22:13 crc kubenswrapper[4651]: I1011 05:22:13.782728 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 11 05:22:13 crc kubenswrapper[4651]: I1011 05:22:13.782780 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 11 05:22:13 crc kubenswrapper[4651]: I1011 05:22:13.782728 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 11 05:22:13 crc kubenswrapper[4651]: I1011 05:22:13.785362 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r489p" Oct 11 05:22:13 crc kubenswrapper[4651]: I1011 05:22:13.788794 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-7vjzq"] Oct 11 05:22:13 crc kubenswrapper[4651]: I1011 05:22:13.872192 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dkxsm\" (UniqueName: \"kubernetes.io/projected/0dfc1301-868d-4226-917c-475041f220f5-kube-api-access-dkxsm\") pod \"ssh-known-hosts-edpm-deployment-7vjzq\" (UID: \"0dfc1301-868d-4226-917c-475041f220f5\") " pod="openstack/ssh-known-hosts-edpm-deployment-7vjzq" Oct 11 05:22:13 crc kubenswrapper[4651]: I1011 05:22:13.872387 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0dfc1301-868d-4226-917c-475041f220f5-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-7vjzq\" (UID: \"0dfc1301-868d-4226-917c-475041f220f5\") " pod="openstack/ssh-known-hosts-edpm-deployment-7vjzq" Oct 11 05:22:13 crc kubenswrapper[4651]: I1011 
05:22:13.872440 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0dfc1301-868d-4226-917c-475041f220f5-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-7vjzq\" (UID: \"0dfc1301-868d-4226-917c-475041f220f5\") " pod="openstack/ssh-known-hosts-edpm-deployment-7vjzq" Oct 11 05:22:13 crc kubenswrapper[4651]: I1011 05:22:13.973321 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0dfc1301-868d-4226-917c-475041f220f5-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-7vjzq\" (UID: \"0dfc1301-868d-4226-917c-475041f220f5\") " pod="openstack/ssh-known-hosts-edpm-deployment-7vjzq" Oct 11 05:22:13 crc kubenswrapper[4651]: I1011 05:22:13.973386 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0dfc1301-868d-4226-917c-475041f220f5-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-7vjzq\" (UID: \"0dfc1301-868d-4226-917c-475041f220f5\") " pod="openstack/ssh-known-hosts-edpm-deployment-7vjzq" Oct 11 05:22:13 crc kubenswrapper[4651]: I1011 05:22:13.973470 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dkxsm\" (UniqueName: \"kubernetes.io/projected/0dfc1301-868d-4226-917c-475041f220f5-kube-api-access-dkxsm\") pod \"ssh-known-hosts-edpm-deployment-7vjzq\" (UID: \"0dfc1301-868d-4226-917c-475041f220f5\") " pod="openstack/ssh-known-hosts-edpm-deployment-7vjzq" Oct 11 05:22:13 crc kubenswrapper[4651]: I1011 05:22:13.979173 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0dfc1301-868d-4226-917c-475041f220f5-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-7vjzq\" (UID: \"0dfc1301-868d-4226-917c-475041f220f5\") " pod="openstack/ssh-known-hosts-edpm-deployment-7vjzq" Oct 11 05:22:13 crc kubenswrapper[4651]: I1011 05:22:13.981767 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0dfc1301-868d-4226-917c-475041f220f5-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-7vjzq\" (UID: \"0dfc1301-868d-4226-917c-475041f220f5\") " pod="openstack/ssh-known-hosts-edpm-deployment-7vjzq" Oct 11 05:22:13 crc kubenswrapper[4651]: I1011 05:22:13.991883 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dkxsm\" (UniqueName: \"kubernetes.io/projected/0dfc1301-868d-4226-917c-475041f220f5-kube-api-access-dkxsm\") pod \"ssh-known-hosts-edpm-deployment-7vjzq\" (UID: \"0dfc1301-868d-4226-917c-475041f220f5\") " pod="openstack/ssh-known-hosts-edpm-deployment-7vjzq" Oct 11 05:22:14 crc kubenswrapper[4651]: I1011 05:22:14.113853 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-7vjzq" Oct 11 05:22:14 crc kubenswrapper[4651]: I1011 05:22:14.777195 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-7vjzq"] Oct 11 05:22:15 crc kubenswrapper[4651]: I1011 05:22:15.697451 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-7vjzq" event={"ID":"0dfc1301-868d-4226-917c-475041f220f5","Type":"ContainerStarted","Data":"6a21aa7b28d8447d3544b581f53c7a1d324681d80532b2ae98f10e2c0f85c331"} Oct 11 05:22:16 crc kubenswrapper[4651]: I1011 05:22:16.710609 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-7vjzq" event={"ID":"0dfc1301-868d-4226-917c-475041f220f5","Type":"ContainerStarted","Data":"b01c7839e66222bf20f674c726759389421c8c270dbbb44221f1f9cda5e4beae"} Oct 11 05:22:16 crc kubenswrapper[4651]: I1011 05:22:16.735599 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-7vjzq" podStartSLOduration=3.095559309 podStartE2EDuration="3.735571143s" podCreationTimestamp="2025-10-11 05:22:13 +0000 UTC" firstStartedPulling="2025-10-11 05:22:14.775323111 +0000 UTC m=+1855.671555907" lastFinishedPulling="2025-10-11 05:22:15.415334935 +0000 UTC m=+1856.311567741" observedRunningTime="2025-10-11 05:22:16.732401252 +0000 UTC m=+1857.628634108" watchObservedRunningTime="2025-10-11 05:22:16.735571143 +0000 UTC m=+1857.631803959" Oct 11 05:22:20 crc kubenswrapper[4651]: I1011 05:22:20.873072 4651 scope.go:117] "RemoveContainer" containerID="9e22517d26caf83c6f2aa3910c4d04d694ebd6ca37f48f88ab2ba796ba35e6e7" Oct 11 05:22:21 crc kubenswrapper[4651]: I1011 05:22:21.772317 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" event={"ID":"519a1ae1-e964-48b0-8b61-835146df28c1","Type":"ContainerStarted","Data":"5009eec8462e179c8ec2fb4c4e90392eec2df5ea9699e07187956c37e75e5cf8"} Oct 11 05:22:23 crc kubenswrapper[4651]: I1011 05:22:23.798863 4651 generic.go:334] "Generic (PLEG): container finished" podID="0dfc1301-868d-4226-917c-475041f220f5" containerID="b01c7839e66222bf20f674c726759389421c8c270dbbb44221f1f9cda5e4beae" exitCode=0 Oct 11 05:22:23 crc kubenswrapper[4651]: I1011 05:22:23.798961 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-7vjzq" event={"ID":"0dfc1301-868d-4226-917c-475041f220f5","Type":"ContainerDied","Data":"b01c7839e66222bf20f674c726759389421c8c270dbbb44221f1f9cda5e4beae"} Oct 11 05:22:25 crc kubenswrapper[4651]: I1011 05:22:25.244349 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-7vjzq" Oct 11 05:22:25 crc kubenswrapper[4651]: I1011 05:22:25.273811 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0dfc1301-868d-4226-917c-475041f220f5-inventory-0\") pod \"0dfc1301-868d-4226-917c-475041f220f5\" (UID: \"0dfc1301-868d-4226-917c-475041f220f5\") " Oct 11 05:22:25 crc kubenswrapper[4651]: I1011 05:22:25.273931 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dkxsm\" (UniqueName: \"kubernetes.io/projected/0dfc1301-868d-4226-917c-475041f220f5-kube-api-access-dkxsm\") pod \"0dfc1301-868d-4226-917c-475041f220f5\" (UID: \"0dfc1301-868d-4226-917c-475041f220f5\") " Oct 11 05:22:25 crc kubenswrapper[4651]: I1011 05:22:25.273976 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0dfc1301-868d-4226-917c-475041f220f5-ssh-key-openstack-edpm-ipam\") pod \"0dfc1301-868d-4226-917c-475041f220f5\" (UID: \"0dfc1301-868d-4226-917c-475041f220f5\") " Oct 11 05:22:25 crc kubenswrapper[4651]: I1011 05:22:25.286396 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0dfc1301-868d-4226-917c-475041f220f5-kube-api-access-dkxsm" (OuterVolumeSpecName: "kube-api-access-dkxsm") pod "0dfc1301-868d-4226-917c-475041f220f5" (UID: "0dfc1301-868d-4226-917c-475041f220f5"). InnerVolumeSpecName "kube-api-access-dkxsm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:22:25 crc kubenswrapper[4651]: I1011 05:22:25.367568 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0dfc1301-868d-4226-917c-475041f220f5-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "0dfc1301-868d-4226-917c-475041f220f5" (UID: "0dfc1301-868d-4226-917c-475041f220f5"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:22:25 crc kubenswrapper[4651]: I1011 05:22:25.375877 4651 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0dfc1301-868d-4226-917c-475041f220f5-inventory-0\") on node \"crc\" DevicePath \"\"" Oct 11 05:22:25 crc kubenswrapper[4651]: I1011 05:22:25.375901 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dkxsm\" (UniqueName: \"kubernetes.io/projected/0dfc1301-868d-4226-917c-475041f220f5-kube-api-access-dkxsm\") on node \"crc\" DevicePath \"\"" Oct 11 05:22:25 crc kubenswrapper[4651]: I1011 05:22:25.376005 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0dfc1301-868d-4226-917c-475041f220f5-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "0dfc1301-868d-4226-917c-475041f220f5" (UID: "0dfc1301-868d-4226-917c-475041f220f5"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:22:25 crc kubenswrapper[4651]: I1011 05:22:25.479006 4651 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0dfc1301-868d-4226-917c-475041f220f5-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 11 05:22:25 crc kubenswrapper[4651]: I1011 05:22:25.818980 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-7vjzq" event={"ID":"0dfc1301-868d-4226-917c-475041f220f5","Type":"ContainerDied","Data":"6a21aa7b28d8447d3544b581f53c7a1d324681d80532b2ae98f10e2c0f85c331"} Oct 11 05:22:25 crc kubenswrapper[4651]: I1011 05:22:25.819026 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6a21aa7b28d8447d3544b581f53c7a1d324681d80532b2ae98f10e2c0f85c331" Oct 11 05:22:25 crc kubenswrapper[4651]: I1011 05:22:25.819032 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-7vjzq" Oct 11 05:22:25 crc kubenswrapper[4651]: I1011 05:22:25.911627 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-6g9mw"] Oct 11 05:22:25 crc kubenswrapper[4651]: E1011 05:22:25.912058 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0dfc1301-868d-4226-917c-475041f220f5" containerName="ssh-known-hosts-edpm-deployment" Oct 11 05:22:25 crc kubenswrapper[4651]: I1011 05:22:25.912075 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="0dfc1301-868d-4226-917c-475041f220f5" containerName="ssh-known-hosts-edpm-deployment" Oct 11 05:22:25 crc kubenswrapper[4651]: I1011 05:22:25.912274 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="0dfc1301-868d-4226-917c-475041f220f5" containerName="ssh-known-hosts-edpm-deployment" Oct 11 05:22:25 crc kubenswrapper[4651]: I1011 05:22:25.912887 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6g9mw" Oct 11 05:22:25 crc kubenswrapper[4651]: I1011 05:22:25.922141 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 11 05:22:25 crc kubenswrapper[4651]: I1011 05:22:25.922245 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 11 05:22:25 crc kubenswrapper[4651]: I1011 05:22:25.922420 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r489p" Oct 11 05:22:25 crc kubenswrapper[4651]: I1011 05:22:25.923245 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 11 05:22:25 crc kubenswrapper[4651]: I1011 05:22:25.927335 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-6g9mw"] Oct 11 05:22:25 crc kubenswrapper[4651]: I1011 05:22:25.989962 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcv9w\" (UniqueName: \"kubernetes.io/projected/cbfb3a24-45a0-4455-984c-134812231d47-kube-api-access-qcv9w\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6g9mw\" (UID: \"cbfb3a24-45a0-4455-984c-134812231d47\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6g9mw" Oct 11 05:22:25 crc kubenswrapper[4651]: I1011 05:22:25.990264 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cbfb3a24-45a0-4455-984c-134812231d47-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6g9mw\" (UID: \"cbfb3a24-45a0-4455-984c-134812231d47\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6g9mw" Oct 11 05:22:25 crc kubenswrapper[4651]: I1011 05:22:25.990332 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cbfb3a24-45a0-4455-984c-134812231d47-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6g9mw\" (UID: \"cbfb3a24-45a0-4455-984c-134812231d47\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6g9mw" Oct 11 05:22:26 crc kubenswrapper[4651]: I1011 05:22:26.094878 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcv9w\" (UniqueName: \"kubernetes.io/projected/cbfb3a24-45a0-4455-984c-134812231d47-kube-api-access-qcv9w\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6g9mw\" (UID: \"cbfb3a24-45a0-4455-984c-134812231d47\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6g9mw" Oct 11 05:22:26 crc kubenswrapper[4651]: I1011 05:22:26.095073 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cbfb3a24-45a0-4455-984c-134812231d47-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6g9mw\" (UID: \"cbfb3a24-45a0-4455-984c-134812231d47\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6g9mw" Oct 11 05:22:26 crc kubenswrapper[4651]: I1011 05:22:26.095280 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cbfb3a24-45a0-4455-984c-134812231d47-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6g9mw\" (UID: \"cbfb3a24-45a0-4455-984c-134812231d47\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6g9mw" Oct 11 05:22:26 crc kubenswrapper[4651]: I1011 05:22:26.103230 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cbfb3a24-45a0-4455-984c-134812231d47-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6g9mw\" (UID: \"cbfb3a24-45a0-4455-984c-134812231d47\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6g9mw" Oct 11 05:22:26 crc kubenswrapper[4651]: I1011 05:22:26.108127 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cbfb3a24-45a0-4455-984c-134812231d47-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6g9mw\" (UID: \"cbfb3a24-45a0-4455-984c-134812231d47\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6g9mw" Oct 11 05:22:26 crc kubenswrapper[4651]: I1011 05:22:26.121947 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qcv9w\" (UniqueName: \"kubernetes.io/projected/cbfb3a24-45a0-4455-984c-134812231d47-kube-api-access-qcv9w\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6g9mw\" (UID: \"cbfb3a24-45a0-4455-984c-134812231d47\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6g9mw" Oct 11 05:22:26 crc kubenswrapper[4651]: I1011 05:22:26.235540 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6g9mw" Oct 11 05:22:26 crc kubenswrapper[4651]: I1011 05:22:26.644224 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-6g9mw"] Oct 11 05:22:26 crc kubenswrapper[4651]: I1011 05:22:26.835380 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6g9mw" event={"ID":"cbfb3a24-45a0-4455-984c-134812231d47","Type":"ContainerStarted","Data":"5f6a17f4eac4609f7d6ec0dd9dd0dee6e1a5273a593a28765148cc350d52f119"} Oct 11 05:22:27 crc kubenswrapper[4651]: I1011 05:22:27.849533 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6g9mw" event={"ID":"cbfb3a24-45a0-4455-984c-134812231d47","Type":"ContainerStarted","Data":"41c04c57a1702ddf816d41c5340c7d2ca16e53b8e8106bce1b1277bf89cf79ef"} Oct 11 05:22:27 crc kubenswrapper[4651]: I1011 05:22:27.882085 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6g9mw" podStartSLOduration=2.395704087 podStartE2EDuration="2.882069249s" podCreationTimestamp="2025-10-11 05:22:25 +0000 UTC" firstStartedPulling="2025-10-11 05:22:26.65189064 +0000 UTC m=+1867.548123456" lastFinishedPulling="2025-10-11 05:22:27.138255832 +0000 UTC m=+1868.034488618" observedRunningTime="2025-10-11 05:22:27.881049113 +0000 UTC m=+1868.777281919" watchObservedRunningTime="2025-10-11 05:22:27.882069249 +0000 UTC m=+1868.778302045" Oct 11 05:22:35 crc kubenswrapper[4651]: I1011 05:22:35.954791 4651 generic.go:334] "Generic (PLEG): container finished" podID="cbfb3a24-45a0-4455-984c-134812231d47" containerID="41c04c57a1702ddf816d41c5340c7d2ca16e53b8e8106bce1b1277bf89cf79ef" exitCode=0 Oct 11 05:22:35 crc kubenswrapper[4651]: I1011 05:22:35.954921 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6g9mw" 
event={"ID":"cbfb3a24-45a0-4455-984c-134812231d47","Type":"ContainerDied","Data":"41c04c57a1702ddf816d41c5340c7d2ca16e53b8e8106bce1b1277bf89cf79ef"} Oct 11 05:22:37 crc kubenswrapper[4651]: I1011 05:22:37.453467 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6g9mw" Oct 11 05:22:37 crc kubenswrapper[4651]: I1011 05:22:37.540368 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cbfb3a24-45a0-4455-984c-134812231d47-inventory\") pod \"cbfb3a24-45a0-4455-984c-134812231d47\" (UID: \"cbfb3a24-45a0-4455-984c-134812231d47\") " Oct 11 05:22:37 crc kubenswrapper[4651]: I1011 05:22:37.540527 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qcv9w\" (UniqueName: \"kubernetes.io/projected/cbfb3a24-45a0-4455-984c-134812231d47-kube-api-access-qcv9w\") pod \"cbfb3a24-45a0-4455-984c-134812231d47\" (UID: \"cbfb3a24-45a0-4455-984c-134812231d47\") " Oct 11 05:22:37 crc kubenswrapper[4651]: I1011 05:22:37.540597 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cbfb3a24-45a0-4455-984c-134812231d47-ssh-key\") pod \"cbfb3a24-45a0-4455-984c-134812231d47\" (UID: \"cbfb3a24-45a0-4455-984c-134812231d47\") " Oct 11 05:22:37 crc kubenswrapper[4651]: I1011 05:22:37.545607 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cbfb3a24-45a0-4455-984c-134812231d47-kube-api-access-qcv9w" (OuterVolumeSpecName: "kube-api-access-qcv9w") pod "cbfb3a24-45a0-4455-984c-134812231d47" (UID: "cbfb3a24-45a0-4455-984c-134812231d47"). InnerVolumeSpecName "kube-api-access-qcv9w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:22:37 crc kubenswrapper[4651]: I1011 05:22:37.576193 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cbfb3a24-45a0-4455-984c-134812231d47-inventory" (OuterVolumeSpecName: "inventory") pod "cbfb3a24-45a0-4455-984c-134812231d47" (UID: "cbfb3a24-45a0-4455-984c-134812231d47"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:22:37 crc kubenswrapper[4651]: I1011 05:22:37.584701 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cbfb3a24-45a0-4455-984c-134812231d47-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "cbfb3a24-45a0-4455-984c-134812231d47" (UID: "cbfb3a24-45a0-4455-984c-134812231d47"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:22:37 crc kubenswrapper[4651]: I1011 05:22:37.643222 4651 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cbfb3a24-45a0-4455-984c-134812231d47-inventory\") on node \"crc\" DevicePath \"\"" Oct 11 05:22:37 crc kubenswrapper[4651]: I1011 05:22:37.643271 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qcv9w\" (UniqueName: \"kubernetes.io/projected/cbfb3a24-45a0-4455-984c-134812231d47-kube-api-access-qcv9w\") on node \"crc\" DevicePath \"\"" Oct 11 05:22:37 crc kubenswrapper[4651]: I1011 05:22:37.643291 4651 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cbfb3a24-45a0-4455-984c-134812231d47-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 11 05:22:37 crc kubenswrapper[4651]: I1011 05:22:37.985201 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6g9mw" event={"ID":"cbfb3a24-45a0-4455-984c-134812231d47","Type":"ContainerDied","Data":"5f6a17f4eac4609f7d6ec0dd9dd0dee6e1a5273a593a28765148cc350d52f119"} Oct 11 05:22:37 crc kubenswrapper[4651]: I1011 05:22:37.985245 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5f6a17f4eac4609f7d6ec0dd9dd0dee6e1a5273a593a28765148cc350d52f119" Oct 11 05:22:37 crc kubenswrapper[4651]: I1011 05:22:37.985314 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6g9mw" Oct 11 05:22:38 crc kubenswrapper[4651]: I1011 05:22:38.088414 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dsw67"] Oct 11 05:22:38 crc kubenswrapper[4651]: E1011 05:22:38.088791 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbfb3a24-45a0-4455-984c-134812231d47" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 11 05:22:38 crc kubenswrapper[4651]: I1011 05:22:38.088813 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbfb3a24-45a0-4455-984c-134812231d47" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 11 05:22:38 crc kubenswrapper[4651]: I1011 05:22:38.089140 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbfb3a24-45a0-4455-984c-134812231d47" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 11 05:22:38 crc kubenswrapper[4651]: I1011 05:22:38.089886 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dsw67" Oct 11 05:22:38 crc kubenswrapper[4651]: I1011 05:22:38.092353 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 11 05:22:38 crc kubenswrapper[4651]: I1011 05:22:38.092426 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 11 05:22:38 crc kubenswrapper[4651]: I1011 05:22:38.092371 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r489p" Oct 11 05:22:38 crc kubenswrapper[4651]: I1011 05:22:38.096512 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 11 05:22:38 crc kubenswrapper[4651]: I1011 05:22:38.115268 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dsw67"] Oct 11 05:22:38 crc kubenswrapper[4651]: I1011 05:22:38.154421 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66vxl\" (UniqueName: \"kubernetes.io/projected/c5dbb723-ec78-4c80-a7fe-10d7499493c7-kube-api-access-66vxl\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-dsw67\" (UID: \"c5dbb723-ec78-4c80-a7fe-10d7499493c7\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dsw67" Oct 11 05:22:38 crc kubenswrapper[4651]: I1011 05:22:38.154631 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c5dbb723-ec78-4c80-a7fe-10d7499493c7-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-dsw67\" (UID: \"c5dbb723-ec78-4c80-a7fe-10d7499493c7\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dsw67" Oct 11 05:22:38 crc kubenswrapper[4651]: I1011 05:22:38.154671 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c5dbb723-ec78-4c80-a7fe-10d7499493c7-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-dsw67\" (UID: \"c5dbb723-ec78-4c80-a7fe-10d7499493c7\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dsw67" Oct 11 05:22:38 crc kubenswrapper[4651]: I1011 05:22:38.256041 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c5dbb723-ec78-4c80-a7fe-10d7499493c7-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-dsw67\" (UID: \"c5dbb723-ec78-4c80-a7fe-10d7499493c7\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dsw67" Oct 11 05:22:38 crc kubenswrapper[4651]: I1011 05:22:38.256407 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c5dbb723-ec78-4c80-a7fe-10d7499493c7-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-dsw67\" (UID: \"c5dbb723-ec78-4c80-a7fe-10d7499493c7\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dsw67" Oct 11 05:22:38 crc kubenswrapper[4651]: I1011 05:22:38.256667 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66vxl\" (UniqueName: \"kubernetes.io/projected/c5dbb723-ec78-4c80-a7fe-10d7499493c7-kube-api-access-66vxl\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-dsw67\" (UID: 
\"c5dbb723-ec78-4c80-a7fe-10d7499493c7\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dsw67" Oct 11 05:22:38 crc kubenswrapper[4651]: I1011 05:22:38.260798 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c5dbb723-ec78-4c80-a7fe-10d7499493c7-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-dsw67\" (UID: \"c5dbb723-ec78-4c80-a7fe-10d7499493c7\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dsw67" Oct 11 05:22:38 crc kubenswrapper[4651]: I1011 05:22:38.261180 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c5dbb723-ec78-4c80-a7fe-10d7499493c7-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-dsw67\" (UID: \"c5dbb723-ec78-4c80-a7fe-10d7499493c7\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dsw67" Oct 11 05:22:38 crc kubenswrapper[4651]: I1011 05:22:38.279530 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66vxl\" (UniqueName: \"kubernetes.io/projected/c5dbb723-ec78-4c80-a7fe-10d7499493c7-kube-api-access-66vxl\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-dsw67\" (UID: \"c5dbb723-ec78-4c80-a7fe-10d7499493c7\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dsw67" Oct 11 05:22:38 crc kubenswrapper[4651]: I1011 05:22:38.416421 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dsw67" Oct 11 05:22:38 crc kubenswrapper[4651]: I1011 05:22:38.811598 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dsw67"] Oct 11 05:22:38 crc kubenswrapper[4651]: I1011 05:22:38.996831 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dsw67" event={"ID":"c5dbb723-ec78-4c80-a7fe-10d7499493c7","Type":"ContainerStarted","Data":"7d2a859306893b6145d262e0e3676d79f6c5409142b681bc47682cfff05d558b"} Oct 11 05:22:40 crc kubenswrapper[4651]: I1011 05:22:40.005538 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dsw67" event={"ID":"c5dbb723-ec78-4c80-a7fe-10d7499493c7","Type":"ContainerStarted","Data":"6ac8a38a22697acb5ff0d62a91dda26b994f7a5031a8d8419ab3cf3ad449e2c6"} Oct 11 05:22:40 crc kubenswrapper[4651]: I1011 05:22:40.019695 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dsw67" podStartSLOduration=1.2783963219999999 podStartE2EDuration="2.019679274s" podCreationTimestamp="2025-10-11 05:22:38 +0000 UTC" firstStartedPulling="2025-10-11 05:22:38.815071669 +0000 UTC m=+1879.711304475" lastFinishedPulling="2025-10-11 05:22:39.556354631 +0000 UTC m=+1880.452587427" observedRunningTime="2025-10-11 05:22:40.018582746 +0000 UTC m=+1880.914815562" watchObservedRunningTime="2025-10-11 05:22:40.019679274 +0000 UTC m=+1880.915912070" Oct 11 05:22:51 crc kubenswrapper[4651]: I1011 05:22:51.129572 4651 generic.go:334] "Generic (PLEG): container finished" podID="c5dbb723-ec78-4c80-a7fe-10d7499493c7" containerID="6ac8a38a22697acb5ff0d62a91dda26b994f7a5031a8d8419ab3cf3ad449e2c6" exitCode=0 Oct 11 05:22:51 crc kubenswrapper[4651]: I1011 05:22:51.129646 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dsw67" 
event={"ID":"c5dbb723-ec78-4c80-a7fe-10d7499493c7","Type":"ContainerDied","Data":"6ac8a38a22697acb5ff0d62a91dda26b994f7a5031a8d8419ab3cf3ad449e2c6"} Oct 11 05:22:52 crc kubenswrapper[4651]: I1011 05:22:52.521758 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dsw67" Oct 11 05:22:52 crc kubenswrapper[4651]: I1011 05:22:52.689904 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c5dbb723-ec78-4c80-a7fe-10d7499493c7-inventory\") pod \"c5dbb723-ec78-4c80-a7fe-10d7499493c7\" (UID: \"c5dbb723-ec78-4c80-a7fe-10d7499493c7\") " Oct 11 05:22:52 crc kubenswrapper[4651]: I1011 05:22:52.690025 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-66vxl\" (UniqueName: \"kubernetes.io/projected/c5dbb723-ec78-4c80-a7fe-10d7499493c7-kube-api-access-66vxl\") pod \"c5dbb723-ec78-4c80-a7fe-10d7499493c7\" (UID: \"c5dbb723-ec78-4c80-a7fe-10d7499493c7\") " Oct 11 05:22:52 crc kubenswrapper[4651]: I1011 05:22:52.690683 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c5dbb723-ec78-4c80-a7fe-10d7499493c7-ssh-key\") pod \"c5dbb723-ec78-4c80-a7fe-10d7499493c7\" (UID: \"c5dbb723-ec78-4c80-a7fe-10d7499493c7\") " Oct 11 05:22:52 crc kubenswrapper[4651]: I1011 05:22:52.696521 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5dbb723-ec78-4c80-a7fe-10d7499493c7-kube-api-access-66vxl" (OuterVolumeSpecName: "kube-api-access-66vxl") pod "c5dbb723-ec78-4c80-a7fe-10d7499493c7" (UID: "c5dbb723-ec78-4c80-a7fe-10d7499493c7"). InnerVolumeSpecName "kube-api-access-66vxl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:22:52 crc kubenswrapper[4651]: I1011 05:22:52.727022 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5dbb723-ec78-4c80-a7fe-10d7499493c7-inventory" (OuterVolumeSpecName: "inventory") pod "c5dbb723-ec78-4c80-a7fe-10d7499493c7" (UID: "c5dbb723-ec78-4c80-a7fe-10d7499493c7"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:22:52 crc kubenswrapper[4651]: I1011 05:22:52.736898 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5dbb723-ec78-4c80-a7fe-10d7499493c7-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c5dbb723-ec78-4c80-a7fe-10d7499493c7" (UID: "c5dbb723-ec78-4c80-a7fe-10d7499493c7"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:22:52 crc kubenswrapper[4651]: I1011 05:22:52.791966 4651 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c5dbb723-ec78-4c80-a7fe-10d7499493c7-inventory\") on node \"crc\" DevicePath \"\"" Oct 11 05:22:52 crc kubenswrapper[4651]: I1011 05:22:52.791999 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-66vxl\" (UniqueName: \"kubernetes.io/projected/c5dbb723-ec78-4c80-a7fe-10d7499493c7-kube-api-access-66vxl\") on node \"crc\" DevicePath \"\"" Oct 11 05:22:52 crc kubenswrapper[4651]: I1011 05:22:52.792010 4651 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c5dbb723-ec78-4c80-a7fe-10d7499493c7-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.164095 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dsw67" event={"ID":"c5dbb723-ec78-4c80-a7fe-10d7499493c7","Type":"ContainerDied","Data":"7d2a859306893b6145d262e0e3676d79f6c5409142b681bc47682cfff05d558b"} Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.164126 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dsw67" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.164140 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7d2a859306893b6145d262e0e3676d79f6c5409142b681bc47682cfff05d558b" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.276651 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r"] Oct 11 05:22:53 crc kubenswrapper[4651]: E1011 05:22:53.277104 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5dbb723-ec78-4c80-a7fe-10d7499493c7" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.277131 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5dbb723-ec78-4c80-a7fe-10d7499493c7" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.277385 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5dbb723-ec78-4c80-a7fe-10d7499493c7" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.278242 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.284620 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.284671 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.285286 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.288255 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r"] Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.288537 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.289004 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.291045 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.291213 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.291334 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r489p" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.403696 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.403750 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.403824 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.403910 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.403947 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.403995 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.404022 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.404069 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.404097 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.404128 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2dzmc\" (UniqueName: \"kubernetes.io/projected/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-kube-api-access-2dzmc\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.404151 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-ssh-key\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.404262 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.404349 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.404396 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.505868 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.505943 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.505969 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.506015 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.506037 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.506082 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.506139 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.506171 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.506212 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.506241 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.506275 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.506300 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.506332 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2dzmc\" (UniqueName: \"kubernetes.io/projected/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-kube-api-access-2dzmc\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.506349 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.511216 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.511718 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.512056 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.512959 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.512964 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.515045 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.516023 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.517422 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.517891 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.517931 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.518279 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.520956 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.523798 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-repo-setup-combined-ca-bundle\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.538552 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2dzmc\" (UniqueName: \"kubernetes.io/projected/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-kube-api-access-2dzmc\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:53 crc kubenswrapper[4651]: I1011 05:22:53.623671 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:22:54 crc kubenswrapper[4651]: I1011 05:22:54.255939 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r"] Oct 11 05:22:54 crc kubenswrapper[4651]: W1011 05:22:54.258346 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod751c31c3_37b5_4b70_89ba_3c15aee1b7c3.slice/crio-938549e2c4db1e6f8949b2195566bce97b9b88fa5887c4b8cc8bea5653a088cf WatchSource:0}: Error finding container 938549e2c4db1e6f8949b2195566bce97b9b88fa5887c4b8cc8bea5653a088cf: Status 404 returned error can't find the container with id 938549e2c4db1e6f8949b2195566bce97b9b88fa5887c4b8cc8bea5653a088cf Oct 11 05:22:55 crc kubenswrapper[4651]: I1011 05:22:55.185919 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" event={"ID":"751c31c3-37b5-4b70-89ba-3c15aee1b7c3","Type":"ContainerStarted","Data":"c716aebd7382d805a55c72c0cd211b4f2203d508223f5bf43bcb3e8d2baab6f3"} Oct 11 05:22:55 crc kubenswrapper[4651]: I1011 05:22:55.186587 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" event={"ID":"751c31c3-37b5-4b70-89ba-3c15aee1b7c3","Type":"ContainerStarted","Data":"938549e2c4db1e6f8949b2195566bce97b9b88fa5887c4b8cc8bea5653a088cf"} Oct 11 05:22:55 crc kubenswrapper[4651]: I1011 05:22:55.212909 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" podStartSLOduration=1.751377889 podStartE2EDuration="2.212866244s" podCreationTimestamp="2025-10-11 05:22:53 +0000 UTC" firstStartedPulling="2025-10-11 05:22:54.260387779 +0000 UTC m=+1895.156620575" lastFinishedPulling="2025-10-11 05:22:54.721876134 +0000 UTC m=+1895.618108930" observedRunningTime="2025-10-11 05:22:55.202803267 +0000 UTC m=+1896.099036073" watchObservedRunningTime="2025-10-11 05:22:55.212866244 +0000 UTC m=+1896.109099080" Oct 11 05:23:37 crc kubenswrapper[4651]: I1011 05:23:37.694610 4651 generic.go:334] "Generic (PLEG): container finished" podID="751c31c3-37b5-4b70-89ba-3c15aee1b7c3" containerID="c716aebd7382d805a55c72c0cd211b4f2203d508223f5bf43bcb3e8d2baab6f3" exitCode=0 Oct 11 05:23:37 crc kubenswrapper[4651]: I1011 05:23:37.694731 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" event={"ID":"751c31c3-37b5-4b70-89ba-3c15aee1b7c3","Type":"ContainerDied","Data":"c716aebd7382d805a55c72c0cd211b4f2203d508223f5bf43bcb3e8d2baab6f3"} Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 
05:23:39.204008 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.393373 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-bootstrap-combined-ca-bundle\") pod \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.393443 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-libvirt-combined-ca-bundle\") pod \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.393508 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-ovn-combined-ca-bundle\") pod \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.393569 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.393648 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-telemetry-combined-ca-bundle\") pod \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.393768 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-ssh-key\") pod \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.393859 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-repo-setup-combined-ca-bundle\") pod \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.393900 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-neutron-metadata-combined-ca-bundle\") pod \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.394005 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2dzmc\" (UniqueName: \"kubernetes.io/projected/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-kube-api-access-2dzmc\") pod \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\" (UID: 
\"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.394078 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.394117 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-inventory\") pod \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.394150 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.394191 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-nova-combined-ca-bundle\") pod \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.394244 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-openstack-edpm-ipam-ovn-default-certs-0\") pod \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\" (UID: \"751c31c3-37b5-4b70-89ba-3c15aee1b7c3\") " Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.399183 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "751c31c3-37b5-4b70-89ba-3c15aee1b7c3" (UID: "751c31c3-37b5-4b70-89ba-3c15aee1b7c3"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.399755 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "751c31c3-37b5-4b70-89ba-3c15aee1b7c3" (UID: "751c31c3-37b5-4b70-89ba-3c15aee1b7c3"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.399893 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "751c31c3-37b5-4b70-89ba-3c15aee1b7c3" (UID: "751c31c3-37b5-4b70-89ba-3c15aee1b7c3"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.399953 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "751c31c3-37b5-4b70-89ba-3c15aee1b7c3" (UID: "751c31c3-37b5-4b70-89ba-3c15aee1b7c3"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.400718 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "751c31c3-37b5-4b70-89ba-3c15aee1b7c3" (UID: "751c31c3-37b5-4b70-89ba-3c15aee1b7c3"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.401763 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "751c31c3-37b5-4b70-89ba-3c15aee1b7c3" (UID: "751c31c3-37b5-4b70-89ba-3c15aee1b7c3"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.401856 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "751c31c3-37b5-4b70-89ba-3c15aee1b7c3" (UID: "751c31c3-37b5-4b70-89ba-3c15aee1b7c3"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.402605 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "751c31c3-37b5-4b70-89ba-3c15aee1b7c3" (UID: "751c31c3-37b5-4b70-89ba-3c15aee1b7c3"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.403211 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "751c31c3-37b5-4b70-89ba-3c15aee1b7c3" (UID: "751c31c3-37b5-4b70-89ba-3c15aee1b7c3"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.404059 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "751c31c3-37b5-4b70-89ba-3c15aee1b7c3" (UID: "751c31c3-37b5-4b70-89ba-3c15aee1b7c3"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.404135 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-kube-api-access-2dzmc" (OuterVolumeSpecName: "kube-api-access-2dzmc") pod "751c31c3-37b5-4b70-89ba-3c15aee1b7c3" (UID: "751c31c3-37b5-4b70-89ba-3c15aee1b7c3"). InnerVolumeSpecName "kube-api-access-2dzmc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.414387 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "751c31c3-37b5-4b70-89ba-3c15aee1b7c3" (UID: "751c31c3-37b5-4b70-89ba-3c15aee1b7c3"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.425993 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "751c31c3-37b5-4b70-89ba-3c15aee1b7c3" (UID: "751c31c3-37b5-4b70-89ba-3c15aee1b7c3"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.441105 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-inventory" (OuterVolumeSpecName: "inventory") pod "751c31c3-37b5-4b70-89ba-3c15aee1b7c3" (UID: "751c31c3-37b5-4b70-89ba-3c15aee1b7c3"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.496931 4651 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.496981 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2dzmc\" (UniqueName: \"kubernetes.io/projected/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-kube-api-access-2dzmc\") on node \"crc\" DevicePath \"\"" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.497004 4651 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.497024 4651 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-inventory\") on node \"crc\" DevicePath \"\"" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.497043 4651 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.497061 4651 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.497079 4651 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.497097 4651 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.497116 4651 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.497134 4651 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.497151 4651 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.497170 4651 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.497188 4651 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.497204 4651 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/751c31c3-37b5-4b70-89ba-3c15aee1b7c3-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.719867 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" event={"ID":"751c31c3-37b5-4b70-89ba-3c15aee1b7c3","Type":"ContainerDied","Data":"938549e2c4db1e6f8949b2195566bce97b9b88fa5887c4b8cc8bea5653a088cf"} Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.719929 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="938549e2c4db1e6f8949b2195566bce97b9b88fa5887c4b8cc8bea5653a088cf" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.720503 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.966840 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-cfgdk"] Oct 11 05:23:39 crc kubenswrapper[4651]: E1011 05:23:39.967528 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="751c31c3-37b5-4b70-89ba-3c15aee1b7c3" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.967553 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="751c31c3-37b5-4b70-89ba-3c15aee1b7c3" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.967788 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="751c31c3-37b5-4b70-89ba-3c15aee1b7c3" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.968517 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-cfgdk" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.970786 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.970923 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.972855 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.973469 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.973803 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r489p" Oct 11 05:23:39 crc kubenswrapper[4651]: I1011 05:23:39.993384 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-cfgdk"] Oct 11 05:23:40 crc kubenswrapper[4651]: I1011 05:23:40.107180 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0507706-1820-417c-824e-e8420fda7baa-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-cfgdk\" (UID: \"b0507706-1820-417c-824e-e8420fda7baa\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-cfgdk" Oct 11 05:23:40 crc kubenswrapper[4651]: I1011 05:23:40.107245 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/b0507706-1820-417c-824e-e8420fda7baa-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-cfgdk\" (UID: \"b0507706-1820-417c-824e-e8420fda7baa\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-cfgdk" Oct 11 05:23:40 crc kubenswrapper[4651]: I1011 05:23:40.107289 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b0507706-1820-417c-824e-e8420fda7baa-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-cfgdk\" (UID: \"b0507706-1820-417c-824e-e8420fda7baa\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-cfgdk" Oct 11 05:23:40 crc kubenswrapper[4651]: I1011 05:23:40.109399 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b0507706-1820-417c-824e-e8420fda7baa-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-cfgdk\" (UID: \"b0507706-1820-417c-824e-e8420fda7baa\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-cfgdk" Oct 11 05:23:40 crc kubenswrapper[4651]: I1011 05:23:40.109525 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-48s7x\" (UniqueName: \"kubernetes.io/projected/b0507706-1820-417c-824e-e8420fda7baa-kube-api-access-48s7x\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-cfgdk\" (UID: \"b0507706-1820-417c-824e-e8420fda7baa\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-cfgdk" Oct 11 05:23:40 crc kubenswrapper[4651]: I1011 05:23:40.211079 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/b0507706-1820-417c-824e-e8420fda7baa-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-cfgdk\" (UID: \"b0507706-1820-417c-824e-e8420fda7baa\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-cfgdk" Oct 11 05:23:40 crc kubenswrapper[4651]: I1011 05:23:40.211164 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-48s7x\" (UniqueName: \"kubernetes.io/projected/b0507706-1820-417c-824e-e8420fda7baa-kube-api-access-48s7x\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-cfgdk\" (UID: \"b0507706-1820-417c-824e-e8420fda7baa\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-cfgdk" Oct 11 05:23:40 crc kubenswrapper[4651]: I1011 05:23:40.211209 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0507706-1820-417c-824e-e8420fda7baa-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-cfgdk\" (UID: \"b0507706-1820-417c-824e-e8420fda7baa\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-cfgdk" Oct 11 05:23:40 crc kubenswrapper[4651]: I1011 05:23:40.211240 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/b0507706-1820-417c-824e-e8420fda7baa-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-cfgdk\" (UID: \"b0507706-1820-417c-824e-e8420fda7baa\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-cfgdk" Oct 11 05:23:40 crc kubenswrapper[4651]: I1011 05:23:40.211277 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b0507706-1820-417c-824e-e8420fda7baa-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-cfgdk\" (UID: \"b0507706-1820-417c-824e-e8420fda7baa\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-cfgdk" Oct 11 05:23:40 crc kubenswrapper[4651]: I1011 05:23:40.212526 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/b0507706-1820-417c-824e-e8420fda7baa-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-cfgdk\" (UID: \"b0507706-1820-417c-824e-e8420fda7baa\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-cfgdk" Oct 11 05:23:40 crc kubenswrapper[4651]: I1011 05:23:40.218643 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b0507706-1820-417c-824e-e8420fda7baa-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-cfgdk\" (UID: \"b0507706-1820-417c-824e-e8420fda7baa\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-cfgdk" Oct 11 05:23:40 crc kubenswrapper[4651]: I1011 05:23:40.220222 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0507706-1820-417c-824e-e8420fda7baa-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-cfgdk\" (UID: \"b0507706-1820-417c-824e-e8420fda7baa\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-cfgdk" Oct 11 05:23:40 crc kubenswrapper[4651]: I1011 05:23:40.221038 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b0507706-1820-417c-824e-e8420fda7baa-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-cfgdk\" (UID: \"b0507706-1820-417c-824e-e8420fda7baa\") " 
pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-cfgdk" Oct 11 05:23:40 crc kubenswrapper[4651]: I1011 05:23:40.235738 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-48s7x\" (UniqueName: \"kubernetes.io/projected/b0507706-1820-417c-824e-e8420fda7baa-kube-api-access-48s7x\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-cfgdk\" (UID: \"b0507706-1820-417c-824e-e8420fda7baa\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-cfgdk" Oct 11 05:23:40 crc kubenswrapper[4651]: I1011 05:23:40.295800 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-cfgdk" Oct 11 05:23:40 crc kubenswrapper[4651]: I1011 05:23:40.969616 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-cfgdk"] Oct 11 05:23:40 crc kubenswrapper[4651]: I1011 05:23:40.979642 4651 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 11 05:23:41 crc kubenswrapper[4651]: I1011 05:23:41.743649 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-cfgdk" event={"ID":"b0507706-1820-417c-824e-e8420fda7baa","Type":"ContainerStarted","Data":"4a729d87425f76933fce8561638ccca259ce6100163b5e847452d84ee4d4c668"} Oct 11 05:23:42 crc kubenswrapper[4651]: I1011 05:23:42.761613 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-cfgdk" event={"ID":"b0507706-1820-417c-824e-e8420fda7baa","Type":"ContainerStarted","Data":"46ed1e044495ec6c37a2feb15a5e9f15b69eacb76667420f60bc4da4e27a9ef8"} Oct 11 05:23:42 crc kubenswrapper[4651]: I1011 05:23:42.785400 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-cfgdk" podStartSLOduration=3.267779645 podStartE2EDuration="3.785381033s" podCreationTimestamp="2025-10-11 05:23:39 +0000 UTC" firstStartedPulling="2025-10-11 05:23:40.979076892 +0000 UTC m=+1941.875309728" lastFinishedPulling="2025-10-11 05:23:41.49667827 +0000 UTC m=+1942.392911116" observedRunningTime="2025-10-11 05:23:42.782241493 +0000 UTC m=+1943.678474319" watchObservedRunningTime="2025-10-11 05:23:42.785381033 +0000 UTC m=+1943.681613839" Oct 11 05:24:46 crc kubenswrapper[4651]: I1011 05:24:46.310580 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 05:24:46 crc kubenswrapper[4651]: I1011 05:24:46.311411 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 05:24:53 crc kubenswrapper[4651]: I1011 05:24:53.524908 4651 generic.go:334] "Generic (PLEG): container finished" podID="b0507706-1820-417c-824e-e8420fda7baa" containerID="46ed1e044495ec6c37a2feb15a5e9f15b69eacb76667420f60bc4da4e27a9ef8" exitCode=0 Oct 11 05:24:53 crc kubenswrapper[4651]: I1011 05:24:53.525215 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-cfgdk" 
event={"ID":"b0507706-1820-417c-824e-e8420fda7baa","Type":"ContainerDied","Data":"46ed1e044495ec6c37a2feb15a5e9f15b69eacb76667420f60bc4da4e27a9ef8"} Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.042598 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-cfgdk" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.096837 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/b0507706-1820-417c-824e-e8420fda7baa-ovncontroller-config-0\") pod \"b0507706-1820-417c-824e-e8420fda7baa\" (UID: \"b0507706-1820-417c-824e-e8420fda7baa\") " Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.096988 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b0507706-1820-417c-824e-e8420fda7baa-ssh-key\") pod \"b0507706-1820-417c-824e-e8420fda7baa\" (UID: \"b0507706-1820-417c-824e-e8420fda7baa\") " Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.097127 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0507706-1820-417c-824e-e8420fda7baa-ovn-combined-ca-bundle\") pod \"b0507706-1820-417c-824e-e8420fda7baa\" (UID: \"b0507706-1820-417c-824e-e8420fda7baa\") " Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.097164 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-48s7x\" (UniqueName: \"kubernetes.io/projected/b0507706-1820-417c-824e-e8420fda7baa-kube-api-access-48s7x\") pod \"b0507706-1820-417c-824e-e8420fda7baa\" (UID: \"b0507706-1820-417c-824e-e8420fda7baa\") " Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.097290 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b0507706-1820-417c-824e-e8420fda7baa-inventory\") pod \"b0507706-1820-417c-824e-e8420fda7baa\" (UID: \"b0507706-1820-417c-824e-e8420fda7baa\") " Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.105310 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0507706-1820-417c-824e-e8420fda7baa-kube-api-access-48s7x" (OuterVolumeSpecName: "kube-api-access-48s7x") pod "b0507706-1820-417c-824e-e8420fda7baa" (UID: "b0507706-1820-417c-824e-e8420fda7baa"). InnerVolumeSpecName "kube-api-access-48s7x". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.105850 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0507706-1820-417c-824e-e8420fda7baa-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "b0507706-1820-417c-824e-e8420fda7baa" (UID: "b0507706-1820-417c-824e-e8420fda7baa"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.124525 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b0507706-1820-417c-824e-e8420fda7baa-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "b0507706-1820-417c-824e-e8420fda7baa" (UID: "b0507706-1820-417c-824e-e8420fda7baa"). InnerVolumeSpecName "ovncontroller-config-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.130320 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0507706-1820-417c-824e-e8420fda7baa-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b0507706-1820-417c-824e-e8420fda7baa" (UID: "b0507706-1820-417c-824e-e8420fda7baa"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.138177 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0507706-1820-417c-824e-e8420fda7baa-inventory" (OuterVolumeSpecName: "inventory") pod "b0507706-1820-417c-824e-e8420fda7baa" (UID: "b0507706-1820-417c-824e-e8420fda7baa"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.199546 4651 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b0507706-1820-417c-824e-e8420fda7baa-inventory\") on node \"crc\" DevicePath \"\"" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.199585 4651 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/b0507706-1820-417c-824e-e8420fda7baa-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.199597 4651 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b0507706-1820-417c-824e-e8420fda7baa-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.199606 4651 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0507706-1820-417c-824e-e8420fda7baa-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.199615 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-48s7x\" (UniqueName: \"kubernetes.io/projected/b0507706-1820-417c-824e-e8420fda7baa-kube-api-access-48s7x\") on node \"crc\" DevicePath \"\"" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.552715 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-cfgdk" event={"ID":"b0507706-1820-417c-824e-e8420fda7baa","Type":"ContainerDied","Data":"4a729d87425f76933fce8561638ccca259ce6100163b5e847452d84ee4d4c668"} Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.552778 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4a729d87425f76933fce8561638ccca259ce6100163b5e847452d84ee4d4c668" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.552931 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-cfgdk" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.669757 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4"] Oct 11 05:24:55 crc kubenswrapper[4651]: E1011 05:24:55.670193 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0507706-1820-417c-824e-e8420fda7baa" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.670212 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0507706-1820-417c-824e-e8420fda7baa" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.670410 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0507706-1820-417c-824e-e8420fda7baa" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.671099 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.674588 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.674708 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.674852 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.674965 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.675001 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r489p" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.675285 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.693979 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4"] Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.710353 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6e7f2e9b-b154-4d49-beea-654732761981-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4\" (UID: \"6e7f2e9b-b154-4d49-beea-654732761981\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.710570 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6e7f2e9b-b154-4d49-beea-654732761981-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4\" (UID: \"6e7f2e9b-b154-4d49-beea-654732761981\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.710692 4651 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e7f2e9b-b154-4d49-beea-654732761981-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4\" (UID: \"6e7f2e9b-b154-4d49-beea-654732761981\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.710812 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdzzm\" (UniqueName: \"kubernetes.io/projected/6e7f2e9b-b154-4d49-beea-654732761981-kube-api-access-jdzzm\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4\" (UID: \"6e7f2e9b-b154-4d49-beea-654732761981\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.710970 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6e7f2e9b-b154-4d49-beea-654732761981-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4\" (UID: \"6e7f2e9b-b154-4d49-beea-654732761981\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.711131 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6e7f2e9b-b154-4d49-beea-654732761981-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4\" (UID: \"6e7f2e9b-b154-4d49-beea-654732761981\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.812601 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdzzm\" (UniqueName: \"kubernetes.io/projected/6e7f2e9b-b154-4d49-beea-654732761981-kube-api-access-jdzzm\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4\" (UID: \"6e7f2e9b-b154-4d49-beea-654732761981\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.813148 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6e7f2e9b-b154-4d49-beea-654732761981-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4\" (UID: \"6e7f2e9b-b154-4d49-beea-654732761981\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.813424 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6e7f2e9b-b154-4d49-beea-654732761981-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4\" (UID: \"6e7f2e9b-b154-4d49-beea-654732761981\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.813636 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6e7f2e9b-b154-4d49-beea-654732761981-neutron-ovn-metadata-agent-neutron-config-0\") pod 
\"neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4\" (UID: \"6e7f2e9b-b154-4d49-beea-654732761981\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.813973 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6e7f2e9b-b154-4d49-beea-654732761981-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4\" (UID: \"6e7f2e9b-b154-4d49-beea-654732761981\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.814168 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e7f2e9b-b154-4d49-beea-654732761981-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4\" (UID: \"6e7f2e9b-b154-4d49-beea-654732761981\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.818366 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6e7f2e9b-b154-4d49-beea-654732761981-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4\" (UID: \"6e7f2e9b-b154-4d49-beea-654732761981\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.818909 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6e7f2e9b-b154-4d49-beea-654732761981-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4\" (UID: \"6e7f2e9b-b154-4d49-beea-654732761981\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.819168 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6e7f2e9b-b154-4d49-beea-654732761981-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4\" (UID: \"6e7f2e9b-b154-4d49-beea-654732761981\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.820530 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e7f2e9b-b154-4d49-beea-654732761981-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4\" (UID: \"6e7f2e9b-b154-4d49-beea-654732761981\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.828774 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6e7f2e9b-b154-4d49-beea-654732761981-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4\" (UID: \"6e7f2e9b-b154-4d49-beea-654732761981\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4" Oct 11 05:24:55 crc kubenswrapper[4651]: I1011 05:24:55.844212 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-jdzzm\" (UniqueName: \"kubernetes.io/projected/6e7f2e9b-b154-4d49-beea-654732761981-kube-api-access-jdzzm\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4\" (UID: \"6e7f2e9b-b154-4d49-beea-654732761981\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4" Oct 11 05:24:56 crc kubenswrapper[4651]: I1011 05:24:56.014349 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4" Oct 11 05:24:56 crc kubenswrapper[4651]: I1011 05:24:56.432245 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4"] Oct 11 05:24:56 crc kubenswrapper[4651]: I1011 05:24:56.567445 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4" event={"ID":"6e7f2e9b-b154-4d49-beea-654732761981","Type":"ContainerStarted","Data":"25709da988c48d576ecb55c03ac8ff04acaa8591357ffcab4b6a69d85bbeb49d"} Oct 11 05:24:57 crc kubenswrapper[4651]: I1011 05:24:57.582133 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4" event={"ID":"6e7f2e9b-b154-4d49-beea-654732761981","Type":"ContainerStarted","Data":"403fbb33d97aca4122338e44a097652410dcb28a0dfbd78030d7d55951190fc7"} Oct 11 05:24:57 crc kubenswrapper[4651]: I1011 05:24:57.614931 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4" podStartSLOduration=1.8693461120000001 podStartE2EDuration="2.614908972s" podCreationTimestamp="2025-10-11 05:24:55 +0000 UTC" firstStartedPulling="2025-10-11 05:24:56.43850069 +0000 UTC m=+2017.334733486" lastFinishedPulling="2025-10-11 05:24:57.18406354 +0000 UTC m=+2018.080296346" observedRunningTime="2025-10-11 05:24:57.600711087 +0000 UTC m=+2018.496943883" watchObservedRunningTime="2025-10-11 05:24:57.614908972 +0000 UTC m=+2018.511141768" Oct 11 05:25:06 crc kubenswrapper[4651]: I1011 05:25:06.131928 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-glmp6"] Oct 11 05:25:06 crc kubenswrapper[4651]: I1011 05:25:06.138133 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-glmp6" Oct 11 05:25:06 crc kubenswrapper[4651]: I1011 05:25:06.148127 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-glmp6"] Oct 11 05:25:06 crc kubenswrapper[4651]: I1011 05:25:06.202025 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da57e636-abb0-45d2-96fa-9bd634ffedaa-utilities\") pod \"redhat-marketplace-glmp6\" (UID: \"da57e636-abb0-45d2-96fa-9bd634ffedaa\") " pod="openshift-marketplace/redhat-marketplace-glmp6" Oct 11 05:25:06 crc kubenswrapper[4651]: I1011 05:25:06.202090 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x777w\" (UniqueName: \"kubernetes.io/projected/da57e636-abb0-45d2-96fa-9bd634ffedaa-kube-api-access-x777w\") pod \"redhat-marketplace-glmp6\" (UID: \"da57e636-abb0-45d2-96fa-9bd634ffedaa\") " pod="openshift-marketplace/redhat-marketplace-glmp6" Oct 11 05:25:06 crc kubenswrapper[4651]: I1011 05:25:06.202179 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da57e636-abb0-45d2-96fa-9bd634ffedaa-catalog-content\") pod \"redhat-marketplace-glmp6\" (UID: \"da57e636-abb0-45d2-96fa-9bd634ffedaa\") " pod="openshift-marketplace/redhat-marketplace-glmp6" Oct 11 05:25:06 crc kubenswrapper[4651]: I1011 05:25:06.304324 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x777w\" (UniqueName: \"kubernetes.io/projected/da57e636-abb0-45d2-96fa-9bd634ffedaa-kube-api-access-x777w\") pod \"redhat-marketplace-glmp6\" (UID: \"da57e636-abb0-45d2-96fa-9bd634ffedaa\") " pod="openshift-marketplace/redhat-marketplace-glmp6" Oct 11 05:25:06 crc kubenswrapper[4651]: I1011 05:25:06.304640 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da57e636-abb0-45d2-96fa-9bd634ffedaa-catalog-content\") pod \"redhat-marketplace-glmp6\" (UID: \"da57e636-abb0-45d2-96fa-9bd634ffedaa\") " pod="openshift-marketplace/redhat-marketplace-glmp6" Oct 11 05:25:06 crc kubenswrapper[4651]: I1011 05:25:06.304762 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da57e636-abb0-45d2-96fa-9bd634ffedaa-utilities\") pod \"redhat-marketplace-glmp6\" (UID: \"da57e636-abb0-45d2-96fa-9bd634ffedaa\") " pod="openshift-marketplace/redhat-marketplace-glmp6" Oct 11 05:25:06 crc kubenswrapper[4651]: I1011 05:25:06.305173 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da57e636-abb0-45d2-96fa-9bd634ffedaa-catalog-content\") pod \"redhat-marketplace-glmp6\" (UID: \"da57e636-abb0-45d2-96fa-9bd634ffedaa\") " pod="openshift-marketplace/redhat-marketplace-glmp6" Oct 11 05:25:06 crc kubenswrapper[4651]: I1011 05:25:06.305378 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da57e636-abb0-45d2-96fa-9bd634ffedaa-utilities\") pod \"redhat-marketplace-glmp6\" (UID: \"da57e636-abb0-45d2-96fa-9bd634ffedaa\") " pod="openshift-marketplace/redhat-marketplace-glmp6" Oct 11 05:25:06 crc kubenswrapper[4651]: I1011 05:25:06.330088 4651 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-x777w\" (UniqueName: \"kubernetes.io/projected/da57e636-abb0-45d2-96fa-9bd634ffedaa-kube-api-access-x777w\") pod \"redhat-marketplace-glmp6\" (UID: \"da57e636-abb0-45d2-96fa-9bd634ffedaa\") " pod="openshift-marketplace/redhat-marketplace-glmp6" Oct 11 05:25:06 crc kubenswrapper[4651]: I1011 05:25:06.483039 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-glmp6" Oct 11 05:25:06 crc kubenswrapper[4651]: I1011 05:25:06.843938 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-glmp6"] Oct 11 05:25:07 crc kubenswrapper[4651]: I1011 05:25:07.704435 4651 generic.go:334] "Generic (PLEG): container finished" podID="da57e636-abb0-45d2-96fa-9bd634ffedaa" containerID="1efd71d2b7e78b3ffba8cd72d1841dc7d150ce76f168708d39234bc37e854b31" exitCode=0 Oct 11 05:25:07 crc kubenswrapper[4651]: I1011 05:25:07.704528 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-glmp6" event={"ID":"da57e636-abb0-45d2-96fa-9bd634ffedaa","Type":"ContainerDied","Data":"1efd71d2b7e78b3ffba8cd72d1841dc7d150ce76f168708d39234bc37e854b31"} Oct 11 05:25:07 crc kubenswrapper[4651]: I1011 05:25:07.704819 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-glmp6" event={"ID":"da57e636-abb0-45d2-96fa-9bd634ffedaa","Type":"ContainerStarted","Data":"6de5fc599f5316925710ab91cdd2bfe32e0fdc15ed89b00b5b73b3d283625818"} Oct 11 05:25:08 crc kubenswrapper[4651]: I1011 05:25:08.720047 4651 generic.go:334] "Generic (PLEG): container finished" podID="da57e636-abb0-45d2-96fa-9bd634ffedaa" containerID="a0528fd12663a380a1abfe8fd764be56888ed8cc77d3f868779bea3cd86dd8d6" exitCode=0 Oct 11 05:25:08 crc kubenswrapper[4651]: I1011 05:25:08.720154 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-glmp6" event={"ID":"da57e636-abb0-45d2-96fa-9bd634ffedaa","Type":"ContainerDied","Data":"a0528fd12663a380a1abfe8fd764be56888ed8cc77d3f868779bea3cd86dd8d6"} Oct 11 05:25:09 crc kubenswrapper[4651]: I1011 05:25:09.736094 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-glmp6" event={"ID":"da57e636-abb0-45d2-96fa-9bd634ffedaa","Type":"ContainerStarted","Data":"9f74f9c8503128e48b6756f0619691cb0b908882d4a1e2df1d2fdf6a03286b6a"} Oct 11 05:25:09 crc kubenswrapper[4651]: I1011 05:25:09.797254 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-glmp6" podStartSLOduration=2.34392417 podStartE2EDuration="3.797223686s" podCreationTimestamp="2025-10-11 05:25:06 +0000 UTC" firstStartedPulling="2025-10-11 05:25:07.70808567 +0000 UTC m=+2028.604318496" lastFinishedPulling="2025-10-11 05:25:09.161385196 +0000 UTC m=+2030.057618012" observedRunningTime="2025-10-11 05:25:09.78260311 +0000 UTC m=+2030.678835986" watchObservedRunningTime="2025-10-11 05:25:09.797223686 +0000 UTC m=+2030.693456522" Oct 11 05:25:16 crc kubenswrapper[4651]: I1011 05:25:16.310513 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 05:25:16 crc kubenswrapper[4651]: I1011 05:25:16.311190 4651 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 05:25:16 crc kubenswrapper[4651]: I1011 05:25:16.484208 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-glmp6" Oct 11 05:25:16 crc kubenswrapper[4651]: I1011 05:25:16.484288 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-glmp6" Oct 11 05:25:16 crc kubenswrapper[4651]: I1011 05:25:16.567196 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-glmp6" Oct 11 05:25:16 crc kubenswrapper[4651]: I1011 05:25:16.905082 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-glmp6" Oct 11 05:25:16 crc kubenswrapper[4651]: I1011 05:25:16.968766 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-glmp6"] Oct 11 05:25:18 crc kubenswrapper[4651]: I1011 05:25:18.842024 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-glmp6" podUID="da57e636-abb0-45d2-96fa-9bd634ffedaa" containerName="registry-server" containerID="cri-o://9f74f9c8503128e48b6756f0619691cb0b908882d4a1e2df1d2fdf6a03286b6a" gracePeriod=2 Oct 11 05:25:19 crc kubenswrapper[4651]: I1011 05:25:19.301713 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-glmp6" Oct 11 05:25:19 crc kubenswrapper[4651]: I1011 05:25:19.348282 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da57e636-abb0-45d2-96fa-9bd634ffedaa-utilities\") pod \"da57e636-abb0-45d2-96fa-9bd634ffedaa\" (UID: \"da57e636-abb0-45d2-96fa-9bd634ffedaa\") " Oct 11 05:25:19 crc kubenswrapper[4651]: I1011 05:25:19.348713 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x777w\" (UniqueName: \"kubernetes.io/projected/da57e636-abb0-45d2-96fa-9bd634ffedaa-kube-api-access-x777w\") pod \"da57e636-abb0-45d2-96fa-9bd634ffedaa\" (UID: \"da57e636-abb0-45d2-96fa-9bd634ffedaa\") " Oct 11 05:25:19 crc kubenswrapper[4651]: I1011 05:25:19.348932 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da57e636-abb0-45d2-96fa-9bd634ffedaa-catalog-content\") pod \"da57e636-abb0-45d2-96fa-9bd634ffedaa\" (UID: \"da57e636-abb0-45d2-96fa-9bd634ffedaa\") " Oct 11 05:25:19 crc kubenswrapper[4651]: I1011 05:25:19.349113 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/da57e636-abb0-45d2-96fa-9bd634ffedaa-utilities" (OuterVolumeSpecName: "utilities") pod "da57e636-abb0-45d2-96fa-9bd634ffedaa" (UID: "da57e636-abb0-45d2-96fa-9bd634ffedaa"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:25:19 crc kubenswrapper[4651]: I1011 05:25:19.349780 4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da57e636-abb0-45d2-96fa-9bd634ffedaa-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 05:25:19 crc kubenswrapper[4651]: I1011 05:25:19.358100 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da57e636-abb0-45d2-96fa-9bd634ffedaa-kube-api-access-x777w" (OuterVolumeSpecName: "kube-api-access-x777w") pod "da57e636-abb0-45d2-96fa-9bd634ffedaa" (UID: "da57e636-abb0-45d2-96fa-9bd634ffedaa"). InnerVolumeSpecName "kube-api-access-x777w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:25:19 crc kubenswrapper[4651]: I1011 05:25:19.361996 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/da57e636-abb0-45d2-96fa-9bd634ffedaa-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "da57e636-abb0-45d2-96fa-9bd634ffedaa" (UID: "da57e636-abb0-45d2-96fa-9bd634ffedaa"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:25:19 crc kubenswrapper[4651]: I1011 05:25:19.451418 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x777w\" (UniqueName: \"kubernetes.io/projected/da57e636-abb0-45d2-96fa-9bd634ffedaa-kube-api-access-x777w\") on node \"crc\" DevicePath \"\"" Oct 11 05:25:19 crc kubenswrapper[4651]: I1011 05:25:19.451452 4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da57e636-abb0-45d2-96fa-9bd634ffedaa-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 05:25:19 crc kubenswrapper[4651]: I1011 05:25:19.859409 4651 generic.go:334] "Generic (PLEG): container finished" podID="da57e636-abb0-45d2-96fa-9bd634ffedaa" containerID="9f74f9c8503128e48b6756f0619691cb0b908882d4a1e2df1d2fdf6a03286b6a" exitCode=0 Oct 11 05:25:19 crc kubenswrapper[4651]: I1011 05:25:19.859616 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-glmp6" Oct 11 05:25:19 crc kubenswrapper[4651]: I1011 05:25:19.859655 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-glmp6" event={"ID":"da57e636-abb0-45d2-96fa-9bd634ffedaa","Type":"ContainerDied","Data":"9f74f9c8503128e48b6756f0619691cb0b908882d4a1e2df1d2fdf6a03286b6a"} Oct 11 05:25:19 crc kubenswrapper[4651]: I1011 05:25:19.859941 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-glmp6" event={"ID":"da57e636-abb0-45d2-96fa-9bd634ffedaa","Type":"ContainerDied","Data":"6de5fc599f5316925710ab91cdd2bfe32e0fdc15ed89b00b5b73b3d283625818"} Oct 11 05:25:19 crc kubenswrapper[4651]: I1011 05:25:19.859976 4651 scope.go:117] "RemoveContainer" containerID="9f74f9c8503128e48b6756f0619691cb0b908882d4a1e2df1d2fdf6a03286b6a" Oct 11 05:25:19 crc kubenswrapper[4651]: I1011 05:25:19.903240 4651 scope.go:117] "RemoveContainer" containerID="a0528fd12663a380a1abfe8fd764be56888ed8cc77d3f868779bea3cd86dd8d6" Oct 11 05:25:19 crc kubenswrapper[4651]: I1011 05:25:19.910753 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-glmp6"] Oct 11 05:25:19 crc kubenswrapper[4651]: I1011 05:25:19.924467 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-glmp6"] Oct 11 05:25:19 crc kubenswrapper[4651]: I1011 05:25:19.952137 4651 scope.go:117] "RemoveContainer" containerID="1efd71d2b7e78b3ffba8cd72d1841dc7d150ce76f168708d39234bc37e854b31" Oct 11 05:25:19 crc kubenswrapper[4651]: I1011 05:25:19.995102 4651 scope.go:117] "RemoveContainer" containerID="9f74f9c8503128e48b6756f0619691cb0b908882d4a1e2df1d2fdf6a03286b6a" Oct 11 05:25:19 crc kubenswrapper[4651]: E1011 05:25:19.996093 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f74f9c8503128e48b6756f0619691cb0b908882d4a1e2df1d2fdf6a03286b6a\": container with ID starting with 9f74f9c8503128e48b6756f0619691cb0b908882d4a1e2df1d2fdf6a03286b6a not found: ID does not exist" containerID="9f74f9c8503128e48b6756f0619691cb0b908882d4a1e2df1d2fdf6a03286b6a" Oct 11 05:25:19 crc kubenswrapper[4651]: I1011 05:25:19.996134 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f74f9c8503128e48b6756f0619691cb0b908882d4a1e2df1d2fdf6a03286b6a"} err="failed to get container status \"9f74f9c8503128e48b6756f0619691cb0b908882d4a1e2df1d2fdf6a03286b6a\": rpc error: code = NotFound desc = could not find container \"9f74f9c8503128e48b6756f0619691cb0b908882d4a1e2df1d2fdf6a03286b6a\": container with ID starting with 9f74f9c8503128e48b6756f0619691cb0b908882d4a1e2df1d2fdf6a03286b6a not found: ID does not exist" Oct 11 05:25:19 crc kubenswrapper[4651]: I1011 05:25:19.996161 4651 scope.go:117] "RemoveContainer" containerID="a0528fd12663a380a1abfe8fd764be56888ed8cc77d3f868779bea3cd86dd8d6" Oct 11 05:25:19 crc kubenswrapper[4651]: E1011 05:25:19.997043 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0528fd12663a380a1abfe8fd764be56888ed8cc77d3f868779bea3cd86dd8d6\": container with ID starting with a0528fd12663a380a1abfe8fd764be56888ed8cc77d3f868779bea3cd86dd8d6 not found: ID does not exist" containerID="a0528fd12663a380a1abfe8fd764be56888ed8cc77d3f868779bea3cd86dd8d6" Oct 11 05:25:19 crc kubenswrapper[4651]: I1011 05:25:19.997077 4651 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0528fd12663a380a1abfe8fd764be56888ed8cc77d3f868779bea3cd86dd8d6"} err="failed to get container status \"a0528fd12663a380a1abfe8fd764be56888ed8cc77d3f868779bea3cd86dd8d6\": rpc error: code = NotFound desc = could not find container \"a0528fd12663a380a1abfe8fd764be56888ed8cc77d3f868779bea3cd86dd8d6\": container with ID starting with a0528fd12663a380a1abfe8fd764be56888ed8cc77d3f868779bea3cd86dd8d6 not found: ID does not exist" Oct 11 05:25:19 crc kubenswrapper[4651]: I1011 05:25:19.997095 4651 scope.go:117] "RemoveContainer" containerID="1efd71d2b7e78b3ffba8cd72d1841dc7d150ce76f168708d39234bc37e854b31" Oct 11 05:25:19 crc kubenswrapper[4651]: E1011 05:25:19.997851 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1efd71d2b7e78b3ffba8cd72d1841dc7d150ce76f168708d39234bc37e854b31\": container with ID starting with 1efd71d2b7e78b3ffba8cd72d1841dc7d150ce76f168708d39234bc37e854b31 not found: ID does not exist" containerID="1efd71d2b7e78b3ffba8cd72d1841dc7d150ce76f168708d39234bc37e854b31" Oct 11 05:25:19 crc kubenswrapper[4651]: I1011 05:25:19.997984 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1efd71d2b7e78b3ffba8cd72d1841dc7d150ce76f168708d39234bc37e854b31"} err="failed to get container status \"1efd71d2b7e78b3ffba8cd72d1841dc7d150ce76f168708d39234bc37e854b31\": rpc error: code = NotFound desc = could not find container \"1efd71d2b7e78b3ffba8cd72d1841dc7d150ce76f168708d39234bc37e854b31\": container with ID starting with 1efd71d2b7e78b3ffba8cd72d1841dc7d150ce76f168708d39234bc37e854b31 not found: ID does not exist" Oct 11 05:25:21 crc kubenswrapper[4651]: I1011 05:25:21.922045 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da57e636-abb0-45d2-96fa-9bd634ffedaa" path="/var/lib/kubelet/pods/da57e636-abb0-45d2-96fa-9bd634ffedaa/volumes" Oct 11 05:25:25 crc kubenswrapper[4651]: E1011 05:25:25.808253 4651 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podda57e636_abb0_45d2_96fa_9bd634ffedaa.slice/crio-6de5fc599f5316925710ab91cdd2bfe32e0fdc15ed89b00b5b73b3d283625818\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podda57e636_abb0_45d2_96fa_9bd634ffedaa.slice\": RecentStats: unable to find data in memory cache]" Oct 11 05:25:36 crc kubenswrapper[4651]: E1011 05:25:36.095610 4651 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podda57e636_abb0_45d2_96fa_9bd634ffedaa.slice/crio-6de5fc599f5316925710ab91cdd2bfe32e0fdc15ed89b00b5b73b3d283625818\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podda57e636_abb0_45d2_96fa_9bd634ffedaa.slice\": RecentStats: unable to find data in memory cache]" Oct 11 05:25:46 crc kubenswrapper[4651]: I1011 05:25:46.310386 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 05:25:46 crc kubenswrapper[4651]: I1011 
05:25:46.311091 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 05:25:46 crc kubenswrapper[4651]: I1011 05:25:46.311187 4651 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" Oct 11 05:25:46 crc kubenswrapper[4651]: I1011 05:25:46.312358 4651 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5009eec8462e179c8ec2fb4c4e90392eec2df5ea9699e07187956c37e75e5cf8"} pod="openshift-machine-config-operator/machine-config-daemon-78jnv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 11 05:25:46 crc kubenswrapper[4651]: I1011 05:25:46.312474 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" containerID="cri-o://5009eec8462e179c8ec2fb4c4e90392eec2df5ea9699e07187956c37e75e5cf8" gracePeriod=600 Oct 11 05:25:46 crc kubenswrapper[4651]: E1011 05:25:46.388144 4651 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podda57e636_abb0_45d2_96fa_9bd634ffedaa.slice/crio-6de5fc599f5316925710ab91cdd2bfe32e0fdc15ed89b00b5b73b3d283625818\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podda57e636_abb0_45d2_96fa_9bd634ffedaa.slice\": RecentStats: unable to find data in memory cache]" Oct 11 05:25:47 crc kubenswrapper[4651]: I1011 05:25:47.204056 4651 generic.go:334] "Generic (PLEG): container finished" podID="519a1ae1-e964-48b0-8b61-835146df28c1" containerID="5009eec8462e179c8ec2fb4c4e90392eec2df5ea9699e07187956c37e75e5cf8" exitCode=0 Oct 11 05:25:47 crc kubenswrapper[4651]: I1011 05:25:47.204164 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" event={"ID":"519a1ae1-e964-48b0-8b61-835146df28c1","Type":"ContainerDied","Data":"5009eec8462e179c8ec2fb4c4e90392eec2df5ea9699e07187956c37e75e5cf8"} Oct 11 05:25:47 crc kubenswrapper[4651]: I1011 05:25:47.204426 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" event={"ID":"519a1ae1-e964-48b0-8b61-835146df28c1","Type":"ContainerStarted","Data":"697c27631e7481f73fa15a7b6752c864f41b48d365447d86c85453a3dee57ea6"} Oct 11 05:25:47 crc kubenswrapper[4651]: I1011 05:25:47.204451 4651 scope.go:117] "RemoveContainer" containerID="9e22517d26caf83c6f2aa3910c4d04d694ebd6ca37f48f88ab2ba796ba35e6e7" Oct 11 05:25:51 crc kubenswrapper[4651]: I1011 05:25:51.251401 4651 generic.go:334] "Generic (PLEG): container finished" podID="6e7f2e9b-b154-4d49-beea-654732761981" containerID="403fbb33d97aca4122338e44a097652410dcb28a0dfbd78030d7d55951190fc7" exitCode=0 Oct 11 05:25:51 crc kubenswrapper[4651]: I1011 05:25:51.251528 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4" 
event={"ID":"6e7f2e9b-b154-4d49-beea-654732761981","Type":"ContainerDied","Data":"403fbb33d97aca4122338e44a097652410dcb28a0dfbd78030d7d55951190fc7"} Oct 11 05:25:52 crc kubenswrapper[4651]: I1011 05:25:52.706638 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4" Oct 11 05:25:52 crc kubenswrapper[4651]: I1011 05:25:52.845334 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6e7f2e9b-b154-4d49-beea-654732761981-neutron-ovn-metadata-agent-neutron-config-0\") pod \"6e7f2e9b-b154-4d49-beea-654732761981\" (UID: \"6e7f2e9b-b154-4d49-beea-654732761981\") " Oct 11 05:25:52 crc kubenswrapper[4651]: I1011 05:25:52.845593 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6e7f2e9b-b154-4d49-beea-654732761981-ssh-key\") pod \"6e7f2e9b-b154-4d49-beea-654732761981\" (UID: \"6e7f2e9b-b154-4d49-beea-654732761981\") " Oct 11 05:25:52 crc kubenswrapper[4651]: I1011 05:25:52.845722 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e7f2e9b-b154-4d49-beea-654732761981-neutron-metadata-combined-ca-bundle\") pod \"6e7f2e9b-b154-4d49-beea-654732761981\" (UID: \"6e7f2e9b-b154-4d49-beea-654732761981\") " Oct 11 05:25:52 crc kubenswrapper[4651]: I1011 05:25:52.845809 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6e7f2e9b-b154-4d49-beea-654732761981-nova-metadata-neutron-config-0\") pod \"6e7f2e9b-b154-4d49-beea-654732761981\" (UID: \"6e7f2e9b-b154-4d49-beea-654732761981\") " Oct 11 05:25:52 crc kubenswrapper[4651]: I1011 05:25:52.845945 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jdzzm\" (UniqueName: \"kubernetes.io/projected/6e7f2e9b-b154-4d49-beea-654732761981-kube-api-access-jdzzm\") pod \"6e7f2e9b-b154-4d49-beea-654732761981\" (UID: \"6e7f2e9b-b154-4d49-beea-654732761981\") " Oct 11 05:25:52 crc kubenswrapper[4651]: I1011 05:25:52.845997 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6e7f2e9b-b154-4d49-beea-654732761981-inventory\") pod \"6e7f2e9b-b154-4d49-beea-654732761981\" (UID: \"6e7f2e9b-b154-4d49-beea-654732761981\") " Oct 11 05:25:52 crc kubenswrapper[4651]: I1011 05:25:52.851770 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e7f2e9b-b154-4d49-beea-654732761981-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "6e7f2e9b-b154-4d49-beea-654732761981" (UID: "6e7f2e9b-b154-4d49-beea-654732761981"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:25:52 crc kubenswrapper[4651]: I1011 05:25:52.854189 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e7f2e9b-b154-4d49-beea-654732761981-kube-api-access-jdzzm" (OuterVolumeSpecName: "kube-api-access-jdzzm") pod "6e7f2e9b-b154-4d49-beea-654732761981" (UID: "6e7f2e9b-b154-4d49-beea-654732761981"). InnerVolumeSpecName "kube-api-access-jdzzm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:25:52 crc kubenswrapper[4651]: I1011 05:25:52.880570 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e7f2e9b-b154-4d49-beea-654732761981-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "6e7f2e9b-b154-4d49-beea-654732761981" (UID: "6e7f2e9b-b154-4d49-beea-654732761981"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:25:52 crc kubenswrapper[4651]: I1011 05:25:52.883651 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e7f2e9b-b154-4d49-beea-654732761981-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6e7f2e9b-b154-4d49-beea-654732761981" (UID: "6e7f2e9b-b154-4d49-beea-654732761981"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:25:52 crc kubenswrapper[4651]: I1011 05:25:52.898875 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e7f2e9b-b154-4d49-beea-654732761981-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "6e7f2e9b-b154-4d49-beea-654732761981" (UID: "6e7f2e9b-b154-4d49-beea-654732761981"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:25:52 crc kubenswrapper[4651]: I1011 05:25:52.907977 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e7f2e9b-b154-4d49-beea-654732761981-inventory" (OuterVolumeSpecName: "inventory") pod "6e7f2e9b-b154-4d49-beea-654732761981" (UID: "6e7f2e9b-b154-4d49-beea-654732761981"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:25:52 crc kubenswrapper[4651]: I1011 05:25:52.950012 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jdzzm\" (UniqueName: \"kubernetes.io/projected/6e7f2e9b-b154-4d49-beea-654732761981-kube-api-access-jdzzm\") on node \"crc\" DevicePath \"\"" Oct 11 05:25:52 crc kubenswrapper[4651]: I1011 05:25:52.950063 4651 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6e7f2e9b-b154-4d49-beea-654732761981-inventory\") on node \"crc\" DevicePath \"\"" Oct 11 05:25:52 crc kubenswrapper[4651]: I1011 05:25:52.950084 4651 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6e7f2e9b-b154-4d49-beea-654732761981-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 11 05:25:52 crc kubenswrapper[4651]: I1011 05:25:52.950103 4651 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6e7f2e9b-b154-4d49-beea-654732761981-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 11 05:25:52 crc kubenswrapper[4651]: I1011 05:25:52.950122 4651 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e7f2e9b-b154-4d49-beea-654732761981-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:25:52 crc kubenswrapper[4651]: I1011 05:25:52.950143 4651 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6e7f2e9b-b154-4d49-beea-654732761981-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 11 05:25:53 crc kubenswrapper[4651]: I1011 05:25:53.277870 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4" event={"ID":"6e7f2e9b-b154-4d49-beea-654732761981","Type":"ContainerDied","Data":"25709da988c48d576ecb55c03ac8ff04acaa8591357ffcab4b6a69d85bbeb49d"} Oct 11 05:25:53 crc kubenswrapper[4651]: I1011 05:25:53.278316 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="25709da988c48d576ecb55c03ac8ff04acaa8591357ffcab4b6a69d85bbeb49d" Oct 11 05:25:53 crc kubenswrapper[4651]: I1011 05:25:53.277948 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4" Oct 11 05:25:53 crc kubenswrapper[4651]: I1011 05:25:53.411875 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk"] Oct 11 05:25:53 crc kubenswrapper[4651]: E1011 05:25:53.412405 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da57e636-abb0-45d2-96fa-9bd634ffedaa" containerName="extract-utilities" Oct 11 05:25:53 crc kubenswrapper[4651]: I1011 05:25:53.412436 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="da57e636-abb0-45d2-96fa-9bd634ffedaa" containerName="extract-utilities" Oct 11 05:25:53 crc kubenswrapper[4651]: E1011 05:25:53.412472 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da57e636-abb0-45d2-96fa-9bd634ffedaa" containerName="registry-server" Oct 11 05:25:53 crc kubenswrapper[4651]: I1011 05:25:53.412485 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="da57e636-abb0-45d2-96fa-9bd634ffedaa" containerName="registry-server" Oct 11 05:25:53 crc kubenswrapper[4651]: E1011 05:25:53.412520 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da57e636-abb0-45d2-96fa-9bd634ffedaa" containerName="extract-content" Oct 11 05:25:53 crc kubenswrapper[4651]: I1011 05:25:53.412533 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="da57e636-abb0-45d2-96fa-9bd634ffedaa" containerName="extract-content" Oct 11 05:25:53 crc kubenswrapper[4651]: E1011 05:25:53.412556 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e7f2e9b-b154-4d49-beea-654732761981" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 11 05:25:53 crc kubenswrapper[4651]: I1011 05:25:53.412571 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e7f2e9b-b154-4d49-beea-654732761981" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 11 05:25:53 crc kubenswrapper[4651]: I1011 05:25:53.412919 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e7f2e9b-b154-4d49-beea-654732761981" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 11 05:25:53 crc kubenswrapper[4651]: I1011 05:25:53.412952 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="da57e636-abb0-45d2-96fa-9bd634ffedaa" containerName="registry-server" Oct 11 05:25:53 crc kubenswrapper[4651]: I1011 05:25:53.414016 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk" Oct 11 05:25:53 crc kubenswrapper[4651]: I1011 05:25:53.417294 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 11 05:25:53 crc kubenswrapper[4651]: I1011 05:25:53.417889 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 11 05:25:53 crc kubenswrapper[4651]: I1011 05:25:53.419061 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 11 05:25:53 crc kubenswrapper[4651]: I1011 05:25:53.419231 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Oct 11 05:25:53 crc kubenswrapper[4651]: I1011 05:25:53.419331 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r489p" Oct 11 05:25:53 crc kubenswrapper[4651]: I1011 05:25:53.451960 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk"] Oct 11 05:25:53 crc kubenswrapper[4651]: I1011 05:25:53.562366 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d343a98-7fde-4f8c-995f-39a826aa5f12-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk\" (UID: \"6d343a98-7fde-4f8c-995f-39a826aa5f12\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk" Oct 11 05:25:53 crc kubenswrapper[4651]: I1011 05:25:53.562429 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/6d343a98-7fde-4f8c-995f-39a826aa5f12-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk\" (UID: \"6d343a98-7fde-4f8c-995f-39a826aa5f12\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk" Oct 11 05:25:53 crc kubenswrapper[4651]: I1011 05:25:53.563734 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d343a98-7fde-4f8c-995f-39a826aa5f12-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk\" (UID: \"6d343a98-7fde-4f8c-995f-39a826aa5f12\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk" Oct 11 05:25:53 crc kubenswrapper[4651]: I1011 05:25:53.563883 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8hrk\" (UniqueName: \"kubernetes.io/projected/6d343a98-7fde-4f8c-995f-39a826aa5f12-kube-api-access-p8hrk\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk\" (UID: \"6d343a98-7fde-4f8c-995f-39a826aa5f12\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk" Oct 11 05:25:53 crc kubenswrapper[4651]: I1011 05:25:53.564114 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d343a98-7fde-4f8c-995f-39a826aa5f12-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk\" (UID: \"6d343a98-7fde-4f8c-995f-39a826aa5f12\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk" Oct 11 05:25:53 crc kubenswrapper[4651]: I1011 05:25:53.665797 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-p8hrk\" (UniqueName: \"kubernetes.io/projected/6d343a98-7fde-4f8c-995f-39a826aa5f12-kube-api-access-p8hrk\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk\" (UID: \"6d343a98-7fde-4f8c-995f-39a826aa5f12\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk" Oct 11 05:25:53 crc kubenswrapper[4651]: I1011 05:25:53.665936 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d343a98-7fde-4f8c-995f-39a826aa5f12-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk\" (UID: \"6d343a98-7fde-4f8c-995f-39a826aa5f12\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk" Oct 11 05:25:53 crc kubenswrapper[4651]: I1011 05:25:53.665979 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/6d343a98-7fde-4f8c-995f-39a826aa5f12-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk\" (UID: \"6d343a98-7fde-4f8c-995f-39a826aa5f12\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk" Oct 11 05:25:53 crc kubenswrapper[4651]: I1011 05:25:53.666004 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d343a98-7fde-4f8c-995f-39a826aa5f12-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk\" (UID: \"6d343a98-7fde-4f8c-995f-39a826aa5f12\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk" Oct 11 05:25:53 crc kubenswrapper[4651]: I1011 05:25:53.666156 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d343a98-7fde-4f8c-995f-39a826aa5f12-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk\" (UID: \"6d343a98-7fde-4f8c-995f-39a826aa5f12\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk" Oct 11 05:25:53 crc kubenswrapper[4651]: I1011 05:25:53.671059 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d343a98-7fde-4f8c-995f-39a826aa5f12-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk\" (UID: \"6d343a98-7fde-4f8c-995f-39a826aa5f12\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk" Oct 11 05:25:53 crc kubenswrapper[4651]: I1011 05:25:53.671050 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/6d343a98-7fde-4f8c-995f-39a826aa5f12-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk\" (UID: \"6d343a98-7fde-4f8c-995f-39a826aa5f12\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk" Oct 11 05:25:53 crc kubenswrapper[4651]: I1011 05:25:53.674358 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d343a98-7fde-4f8c-995f-39a826aa5f12-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk\" (UID: \"6d343a98-7fde-4f8c-995f-39a826aa5f12\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk" Oct 11 05:25:53 crc kubenswrapper[4651]: I1011 05:25:53.685662 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d343a98-7fde-4f8c-995f-39a826aa5f12-libvirt-combined-ca-bundle\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk\" (UID: \"6d343a98-7fde-4f8c-995f-39a826aa5f12\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk" Oct 11 05:25:53 crc kubenswrapper[4651]: I1011 05:25:53.688605 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p8hrk\" (UniqueName: \"kubernetes.io/projected/6d343a98-7fde-4f8c-995f-39a826aa5f12-kube-api-access-p8hrk\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk\" (UID: \"6d343a98-7fde-4f8c-995f-39a826aa5f12\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk" Oct 11 05:25:53 crc kubenswrapper[4651]: I1011 05:25:53.741559 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk" Oct 11 05:25:54 crc kubenswrapper[4651]: I1011 05:25:54.349562 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk"] Oct 11 05:25:55 crc kubenswrapper[4651]: I1011 05:25:55.304330 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk" event={"ID":"6d343a98-7fde-4f8c-995f-39a826aa5f12","Type":"ContainerStarted","Data":"c86f34ac49ce62f2a164d1c4c7cbe28d8e92d9c15b847946a22b837d73ec76a3"} Oct 11 05:25:55 crc kubenswrapper[4651]: I1011 05:25:55.304920 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk" event={"ID":"6d343a98-7fde-4f8c-995f-39a826aa5f12","Type":"ContainerStarted","Data":"c935ba62dedd35cf1296c74cd78a672097cf59749ba5ccf554fcbe761dd084df"} Oct 11 05:25:55 crc kubenswrapper[4651]: I1011 05:25:55.342716 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk" podStartSLOduration=1.901082648 podStartE2EDuration="2.342685186s" podCreationTimestamp="2025-10-11 05:25:53 +0000 UTC" firstStartedPulling="2025-10-11 05:25:54.355150889 +0000 UTC m=+2075.251383695" lastFinishedPulling="2025-10-11 05:25:54.796753407 +0000 UTC m=+2075.692986233" observedRunningTime="2025-10-11 05:25:55.327083785 +0000 UTC m=+2076.223316631" watchObservedRunningTime="2025-10-11 05:25:55.342685186 +0000 UTC m=+2076.238918022" Oct 11 05:25:56 crc kubenswrapper[4651]: E1011 05:25:56.660226 4651 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podda57e636_abb0_45d2_96fa_9bd634ffedaa.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podda57e636_abb0_45d2_96fa_9bd634ffedaa.slice/crio-6de5fc599f5316925710ab91cdd2bfe32e0fdc15ed89b00b5b73b3d283625818\": RecentStats: unable to find data in memory cache]" Oct 11 05:26:06 crc kubenswrapper[4651]: E1011 05:26:06.962756 4651 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podda57e636_abb0_45d2_96fa_9bd634ffedaa.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podda57e636_abb0_45d2_96fa_9bd634ffedaa.slice/crio-6de5fc599f5316925710ab91cdd2bfe32e0fdc15ed89b00b5b73b3d283625818\": RecentStats: unable to find data in memory cache]" Oct 11 05:26:17 crc kubenswrapper[4651]: E1011 05:26:17.262672 4651 
cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podda57e636_abb0_45d2_96fa_9bd634ffedaa.slice/crio-6de5fc599f5316925710ab91cdd2bfe32e0fdc15ed89b00b5b73b3d283625818\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podda57e636_abb0_45d2_96fa_9bd634ffedaa.slice\": RecentStats: unable to find data in memory cache]" Oct 11 05:26:19 crc kubenswrapper[4651]: E1011 05:26:19.912458 4651 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/f6684a32462119a3f862331015a33f2693fe3cec7d20d4c10ca5e2cb936097cd/diff" to get inode usage: stat /var/lib/containers/storage/overlay/f6684a32462119a3f862331015a33f2693fe3cec7d20d4c10ca5e2cb936097cd/diff: no such file or directory, extraDiskErr: Oct 11 05:26:24 crc kubenswrapper[4651]: I1011 05:26:24.091243 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-jn5lw"] Oct 11 05:26:24 crc kubenswrapper[4651]: I1011 05:26:24.094015 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jn5lw" Oct 11 05:26:24 crc kubenswrapper[4651]: I1011 05:26:24.106562 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jn5lw"] Oct 11 05:26:24 crc kubenswrapper[4651]: I1011 05:26:24.225016 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/378754e3-be29-406f-949b-0f3dd6b0f79c-catalog-content\") pod \"community-operators-jn5lw\" (UID: \"378754e3-be29-406f-949b-0f3dd6b0f79c\") " pod="openshift-marketplace/community-operators-jn5lw" Oct 11 05:26:24 crc kubenswrapper[4651]: I1011 05:26:24.225103 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/378754e3-be29-406f-949b-0f3dd6b0f79c-utilities\") pod \"community-operators-jn5lw\" (UID: \"378754e3-be29-406f-949b-0f3dd6b0f79c\") " pod="openshift-marketplace/community-operators-jn5lw" Oct 11 05:26:24 crc kubenswrapper[4651]: I1011 05:26:24.225162 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4r2f\" (UniqueName: \"kubernetes.io/projected/378754e3-be29-406f-949b-0f3dd6b0f79c-kube-api-access-d4r2f\") pod \"community-operators-jn5lw\" (UID: \"378754e3-be29-406f-949b-0f3dd6b0f79c\") " pod="openshift-marketplace/community-operators-jn5lw" Oct 11 05:26:24 crc kubenswrapper[4651]: I1011 05:26:24.326754 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/378754e3-be29-406f-949b-0f3dd6b0f79c-utilities\") pod \"community-operators-jn5lw\" (UID: \"378754e3-be29-406f-949b-0f3dd6b0f79c\") " pod="openshift-marketplace/community-operators-jn5lw" Oct 11 05:26:24 crc kubenswrapper[4651]: I1011 05:26:24.326842 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4r2f\" (UniqueName: \"kubernetes.io/projected/378754e3-be29-406f-949b-0f3dd6b0f79c-kube-api-access-d4r2f\") pod \"community-operators-jn5lw\" (UID: \"378754e3-be29-406f-949b-0f3dd6b0f79c\") " pod="openshift-marketplace/community-operators-jn5lw" Oct 11 05:26:24 crc kubenswrapper[4651]: 
I1011 05:26:24.326970 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/378754e3-be29-406f-949b-0f3dd6b0f79c-catalog-content\") pod \"community-operators-jn5lw\" (UID: \"378754e3-be29-406f-949b-0f3dd6b0f79c\") " pod="openshift-marketplace/community-operators-jn5lw" Oct 11 05:26:24 crc kubenswrapper[4651]: I1011 05:26:24.327515 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/378754e3-be29-406f-949b-0f3dd6b0f79c-catalog-content\") pod \"community-operators-jn5lw\" (UID: \"378754e3-be29-406f-949b-0f3dd6b0f79c\") " pod="openshift-marketplace/community-operators-jn5lw" Oct 11 05:26:24 crc kubenswrapper[4651]: I1011 05:26:24.327745 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/378754e3-be29-406f-949b-0f3dd6b0f79c-utilities\") pod \"community-operators-jn5lw\" (UID: \"378754e3-be29-406f-949b-0f3dd6b0f79c\") " pod="openshift-marketplace/community-operators-jn5lw" Oct 11 05:26:24 crc kubenswrapper[4651]: I1011 05:26:24.352384 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d4r2f\" (UniqueName: \"kubernetes.io/projected/378754e3-be29-406f-949b-0f3dd6b0f79c-kube-api-access-d4r2f\") pod \"community-operators-jn5lw\" (UID: \"378754e3-be29-406f-949b-0f3dd6b0f79c\") " pod="openshift-marketplace/community-operators-jn5lw" Oct 11 05:26:24 crc kubenswrapper[4651]: I1011 05:26:24.457241 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jn5lw" Oct 11 05:26:24 crc kubenswrapper[4651]: I1011 05:26:24.980083 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jn5lw"] Oct 11 05:26:24 crc kubenswrapper[4651]: W1011 05:26:24.988051 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod378754e3_be29_406f_949b_0f3dd6b0f79c.slice/crio-dcbb4efbf82f6b6c218217d334206d6b56f3fadcf7b3a66b3990f37e71c90e31 WatchSource:0}: Error finding container dcbb4efbf82f6b6c218217d334206d6b56f3fadcf7b3a66b3990f37e71c90e31: Status 404 returned error can't find the container with id dcbb4efbf82f6b6c218217d334206d6b56f3fadcf7b3a66b3990f37e71c90e31 Oct 11 05:26:25 crc kubenswrapper[4651]: I1011 05:26:25.670379 4651 generic.go:334] "Generic (PLEG): container finished" podID="378754e3-be29-406f-949b-0f3dd6b0f79c" containerID="b07b316b7fb386a4f35f2825212dfb6f9fa132fdd6b0c9245ecc03d74dc6a152" exitCode=0 Oct 11 05:26:25 crc kubenswrapper[4651]: I1011 05:26:25.670425 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jn5lw" event={"ID":"378754e3-be29-406f-949b-0f3dd6b0f79c","Type":"ContainerDied","Data":"b07b316b7fb386a4f35f2825212dfb6f9fa132fdd6b0c9245ecc03d74dc6a152"} Oct 11 05:26:25 crc kubenswrapper[4651]: I1011 05:26:25.670453 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jn5lw" event={"ID":"378754e3-be29-406f-949b-0f3dd6b0f79c","Type":"ContainerStarted","Data":"dcbb4efbf82f6b6c218217d334206d6b56f3fadcf7b3a66b3990f37e71c90e31"} Oct 11 05:26:26 crc kubenswrapper[4651]: I1011 05:26:26.711723 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jn5lw" 
event={"ID":"378754e3-be29-406f-949b-0f3dd6b0f79c","Type":"ContainerStarted","Data":"6504e09df3d88387121fa2495015654026948cb237502016cb5691e8dc7271f3"} Oct 11 05:26:27 crc kubenswrapper[4651]: I1011 05:26:27.726243 4651 generic.go:334] "Generic (PLEG): container finished" podID="378754e3-be29-406f-949b-0f3dd6b0f79c" containerID="6504e09df3d88387121fa2495015654026948cb237502016cb5691e8dc7271f3" exitCode=0 Oct 11 05:26:27 crc kubenswrapper[4651]: I1011 05:26:27.726333 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jn5lw" event={"ID":"378754e3-be29-406f-949b-0f3dd6b0f79c","Type":"ContainerDied","Data":"6504e09df3d88387121fa2495015654026948cb237502016cb5691e8dc7271f3"} Oct 11 05:26:28 crc kubenswrapper[4651]: I1011 05:26:28.753573 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jn5lw" event={"ID":"378754e3-be29-406f-949b-0f3dd6b0f79c","Type":"ContainerStarted","Data":"1c23fc81a49b91fa5e798a9346cd7eb73c5563fcaa5e2469694c06881636cc09"} Oct 11 05:26:28 crc kubenswrapper[4651]: I1011 05:26:28.780033 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-jn5lw" podStartSLOduration=2.27745752 podStartE2EDuration="4.780005449s" podCreationTimestamp="2025-10-11 05:26:24 +0000 UTC" firstStartedPulling="2025-10-11 05:26:25.674697131 +0000 UTC m=+2106.570929937" lastFinishedPulling="2025-10-11 05:26:28.17724503 +0000 UTC m=+2109.073477866" observedRunningTime="2025-10-11 05:26:28.775185135 +0000 UTC m=+2109.671417991" watchObservedRunningTime="2025-10-11 05:26:28.780005449 +0000 UTC m=+2109.676238275" Oct 11 05:26:28 crc kubenswrapper[4651]: I1011 05:26:28.863593 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rkh95"] Oct 11 05:26:28 crc kubenswrapper[4651]: I1011 05:26:28.866993 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rkh95" Oct 11 05:26:28 crc kubenswrapper[4651]: I1011 05:26:28.889234 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rkh95"] Oct 11 05:26:29 crc kubenswrapper[4651]: I1011 05:26:29.021701 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcz5q\" (UniqueName: \"kubernetes.io/projected/5932a184-ad38-4fcc-bb34-e571ebd7a241-kube-api-access-xcz5q\") pod \"redhat-operators-rkh95\" (UID: \"5932a184-ad38-4fcc-bb34-e571ebd7a241\") " pod="openshift-marketplace/redhat-operators-rkh95" Oct 11 05:26:29 crc kubenswrapper[4651]: I1011 05:26:29.021751 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5932a184-ad38-4fcc-bb34-e571ebd7a241-catalog-content\") pod \"redhat-operators-rkh95\" (UID: \"5932a184-ad38-4fcc-bb34-e571ebd7a241\") " pod="openshift-marketplace/redhat-operators-rkh95" Oct 11 05:26:29 crc kubenswrapper[4651]: I1011 05:26:29.021842 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5932a184-ad38-4fcc-bb34-e571ebd7a241-utilities\") pod \"redhat-operators-rkh95\" (UID: \"5932a184-ad38-4fcc-bb34-e571ebd7a241\") " pod="openshift-marketplace/redhat-operators-rkh95" Oct 11 05:26:29 crc kubenswrapper[4651]: I1011 05:26:29.123785 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcz5q\" (UniqueName: \"kubernetes.io/projected/5932a184-ad38-4fcc-bb34-e571ebd7a241-kube-api-access-xcz5q\") pod \"redhat-operators-rkh95\" (UID: \"5932a184-ad38-4fcc-bb34-e571ebd7a241\") " pod="openshift-marketplace/redhat-operators-rkh95" Oct 11 05:26:29 crc kubenswrapper[4651]: I1011 05:26:29.123852 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5932a184-ad38-4fcc-bb34-e571ebd7a241-catalog-content\") pod \"redhat-operators-rkh95\" (UID: \"5932a184-ad38-4fcc-bb34-e571ebd7a241\") " pod="openshift-marketplace/redhat-operators-rkh95" Oct 11 05:26:29 crc kubenswrapper[4651]: I1011 05:26:29.123922 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5932a184-ad38-4fcc-bb34-e571ebd7a241-utilities\") pod \"redhat-operators-rkh95\" (UID: \"5932a184-ad38-4fcc-bb34-e571ebd7a241\") " pod="openshift-marketplace/redhat-operators-rkh95" Oct 11 05:26:29 crc kubenswrapper[4651]: I1011 05:26:29.124295 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5932a184-ad38-4fcc-bb34-e571ebd7a241-catalog-content\") pod \"redhat-operators-rkh95\" (UID: \"5932a184-ad38-4fcc-bb34-e571ebd7a241\") " pod="openshift-marketplace/redhat-operators-rkh95" Oct 11 05:26:29 crc kubenswrapper[4651]: I1011 05:26:29.124369 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5932a184-ad38-4fcc-bb34-e571ebd7a241-utilities\") pod \"redhat-operators-rkh95\" (UID: \"5932a184-ad38-4fcc-bb34-e571ebd7a241\") " pod="openshift-marketplace/redhat-operators-rkh95" Oct 11 05:26:29 crc kubenswrapper[4651]: I1011 05:26:29.149872 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-xcz5q\" (UniqueName: \"kubernetes.io/projected/5932a184-ad38-4fcc-bb34-e571ebd7a241-kube-api-access-xcz5q\") pod \"redhat-operators-rkh95\" (UID: \"5932a184-ad38-4fcc-bb34-e571ebd7a241\") " pod="openshift-marketplace/redhat-operators-rkh95" Oct 11 05:26:29 crc kubenswrapper[4651]: I1011 05:26:29.224176 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rkh95" Oct 11 05:26:29 crc kubenswrapper[4651]: I1011 05:26:29.689657 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rkh95"] Oct 11 05:26:29 crc kubenswrapper[4651]: I1011 05:26:29.764841 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rkh95" event={"ID":"5932a184-ad38-4fcc-bb34-e571ebd7a241","Type":"ContainerStarted","Data":"fe27c469b13045a14cec308969a49f069452c97520f813a6aaca313eeebcf329"} Oct 11 05:26:30 crc kubenswrapper[4651]: I1011 05:26:30.774952 4651 generic.go:334] "Generic (PLEG): container finished" podID="5932a184-ad38-4fcc-bb34-e571ebd7a241" containerID="ae481a0b4ce50b822504912c9bcd9b35d5cff662cea0e352e78ddfc54104e8f7" exitCode=0 Oct 11 05:26:30 crc kubenswrapper[4651]: I1011 05:26:30.775078 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rkh95" event={"ID":"5932a184-ad38-4fcc-bb34-e571ebd7a241","Type":"ContainerDied","Data":"ae481a0b4ce50b822504912c9bcd9b35d5cff662cea0e352e78ddfc54104e8f7"} Oct 11 05:26:31 crc kubenswrapper[4651]: I1011 05:26:31.793148 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rkh95" event={"ID":"5932a184-ad38-4fcc-bb34-e571ebd7a241","Type":"ContainerStarted","Data":"db8e19c6aab23d169143a015b659282dcf0c67c3876693691b4a5d48e0b37f8f"} Oct 11 05:26:34 crc kubenswrapper[4651]: I1011 05:26:34.457875 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-jn5lw" Oct 11 05:26:34 crc kubenswrapper[4651]: I1011 05:26:34.458233 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-jn5lw" Oct 11 05:26:34 crc kubenswrapper[4651]: I1011 05:26:34.514182 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-jn5lw" Oct 11 05:26:34 crc kubenswrapper[4651]: I1011 05:26:34.826736 4651 generic.go:334] "Generic (PLEG): container finished" podID="5932a184-ad38-4fcc-bb34-e571ebd7a241" containerID="db8e19c6aab23d169143a015b659282dcf0c67c3876693691b4a5d48e0b37f8f" exitCode=0 Oct 11 05:26:34 crc kubenswrapper[4651]: I1011 05:26:34.826838 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rkh95" event={"ID":"5932a184-ad38-4fcc-bb34-e571ebd7a241","Type":"ContainerDied","Data":"db8e19c6aab23d169143a015b659282dcf0c67c3876693691b4a5d48e0b37f8f"} Oct 11 05:26:34 crc kubenswrapper[4651]: I1011 05:26:34.883527 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-jn5lw" Oct 11 05:26:35 crc kubenswrapper[4651]: I1011 05:26:35.841982 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rkh95" event={"ID":"5932a184-ad38-4fcc-bb34-e571ebd7a241","Type":"ContainerStarted","Data":"88e0941aba1d4f3b20f748258f6965dafb421836272148902194a98acf36a790"} Oct 11 05:26:35 crc kubenswrapper[4651]: I1011 05:26:35.876545 
4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rkh95" podStartSLOduration=3.2938214869999998 podStartE2EDuration="7.876519121s" podCreationTimestamp="2025-10-11 05:26:28 +0000 UTC" firstStartedPulling="2025-10-11 05:26:30.777758816 +0000 UTC m=+2111.673991652" lastFinishedPulling="2025-10-11 05:26:35.36045649 +0000 UTC m=+2116.256689286" observedRunningTime="2025-10-11 05:26:35.871977725 +0000 UTC m=+2116.768210521" watchObservedRunningTime="2025-10-11 05:26:35.876519121 +0000 UTC m=+2116.772751927" Oct 11 05:26:37 crc kubenswrapper[4651]: I1011 05:26:37.242769 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jn5lw"] Oct 11 05:26:37 crc kubenswrapper[4651]: I1011 05:26:37.243016 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-jn5lw" podUID="378754e3-be29-406f-949b-0f3dd6b0f79c" containerName="registry-server" containerID="cri-o://1c23fc81a49b91fa5e798a9346cd7eb73c5563fcaa5e2469694c06881636cc09" gracePeriod=2 Oct 11 05:26:37 crc kubenswrapper[4651]: I1011 05:26:37.688373 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jn5lw" Oct 11 05:26:37 crc kubenswrapper[4651]: I1011 05:26:37.822735 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/378754e3-be29-406f-949b-0f3dd6b0f79c-utilities\") pod \"378754e3-be29-406f-949b-0f3dd6b0f79c\" (UID: \"378754e3-be29-406f-949b-0f3dd6b0f79c\") " Oct 11 05:26:37 crc kubenswrapper[4651]: I1011 05:26:37.822953 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4r2f\" (UniqueName: \"kubernetes.io/projected/378754e3-be29-406f-949b-0f3dd6b0f79c-kube-api-access-d4r2f\") pod \"378754e3-be29-406f-949b-0f3dd6b0f79c\" (UID: \"378754e3-be29-406f-949b-0f3dd6b0f79c\") " Oct 11 05:26:37 crc kubenswrapper[4651]: I1011 05:26:37.823098 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/378754e3-be29-406f-949b-0f3dd6b0f79c-catalog-content\") pod \"378754e3-be29-406f-949b-0f3dd6b0f79c\" (UID: \"378754e3-be29-406f-949b-0f3dd6b0f79c\") " Oct 11 05:26:37 crc kubenswrapper[4651]: I1011 05:26:37.823662 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/378754e3-be29-406f-949b-0f3dd6b0f79c-utilities" (OuterVolumeSpecName: "utilities") pod "378754e3-be29-406f-949b-0f3dd6b0f79c" (UID: "378754e3-be29-406f-949b-0f3dd6b0f79c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:26:37 crc kubenswrapper[4651]: I1011 05:26:37.825840 4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/378754e3-be29-406f-949b-0f3dd6b0f79c-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 05:26:37 crc kubenswrapper[4651]: I1011 05:26:37.830673 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/378754e3-be29-406f-949b-0f3dd6b0f79c-kube-api-access-d4r2f" (OuterVolumeSpecName: "kube-api-access-d4r2f") pod "378754e3-be29-406f-949b-0f3dd6b0f79c" (UID: "378754e3-be29-406f-949b-0f3dd6b0f79c"). InnerVolumeSpecName "kube-api-access-d4r2f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:26:37 crc kubenswrapper[4651]: I1011 05:26:37.869475 4651 generic.go:334] "Generic (PLEG): container finished" podID="378754e3-be29-406f-949b-0f3dd6b0f79c" containerID="1c23fc81a49b91fa5e798a9346cd7eb73c5563fcaa5e2469694c06881636cc09" exitCode=0 Oct 11 05:26:37 crc kubenswrapper[4651]: I1011 05:26:37.869578 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jn5lw" Oct 11 05:26:37 crc kubenswrapper[4651]: I1011 05:26:37.882722 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/378754e3-be29-406f-949b-0f3dd6b0f79c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "378754e3-be29-406f-949b-0f3dd6b0f79c" (UID: "378754e3-be29-406f-949b-0f3dd6b0f79c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:26:37 crc kubenswrapper[4651]: I1011 05:26:37.887595 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jn5lw" event={"ID":"378754e3-be29-406f-949b-0f3dd6b0f79c","Type":"ContainerDied","Data":"1c23fc81a49b91fa5e798a9346cd7eb73c5563fcaa5e2469694c06881636cc09"} Oct 11 05:26:37 crc kubenswrapper[4651]: I1011 05:26:37.887642 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jn5lw" event={"ID":"378754e3-be29-406f-949b-0f3dd6b0f79c","Type":"ContainerDied","Data":"dcbb4efbf82f6b6c218217d334206d6b56f3fadcf7b3a66b3990f37e71c90e31"} Oct 11 05:26:37 crc kubenswrapper[4651]: I1011 05:26:37.887661 4651 scope.go:117] "RemoveContainer" containerID="1c23fc81a49b91fa5e798a9346cd7eb73c5563fcaa5e2469694c06881636cc09" Oct 11 05:26:37 crc kubenswrapper[4651]: I1011 05:26:37.926058 4651 scope.go:117] "RemoveContainer" containerID="6504e09df3d88387121fa2495015654026948cb237502016cb5691e8dc7271f3" Oct 11 05:26:37 crc kubenswrapper[4651]: I1011 05:26:37.929750 4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/378754e3-be29-406f-949b-0f3dd6b0f79c-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 05:26:37 crc kubenswrapper[4651]: I1011 05:26:37.929798 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4r2f\" (UniqueName: \"kubernetes.io/projected/378754e3-be29-406f-949b-0f3dd6b0f79c-kube-api-access-d4r2f\") on node \"crc\" DevicePath \"\"" Oct 11 05:26:37 crc kubenswrapper[4651]: I1011 05:26:37.962502 4651 scope.go:117] "RemoveContainer" containerID="b07b316b7fb386a4f35f2825212dfb6f9fa132fdd6b0c9245ecc03d74dc6a152" Oct 11 05:26:38 crc kubenswrapper[4651]: I1011 05:26:38.007169 4651 scope.go:117] "RemoveContainer" containerID="1c23fc81a49b91fa5e798a9346cd7eb73c5563fcaa5e2469694c06881636cc09" Oct 11 05:26:38 crc kubenswrapper[4651]: E1011 05:26:38.007720 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c23fc81a49b91fa5e798a9346cd7eb73c5563fcaa5e2469694c06881636cc09\": container with ID starting with 1c23fc81a49b91fa5e798a9346cd7eb73c5563fcaa5e2469694c06881636cc09 not found: ID does not exist" containerID="1c23fc81a49b91fa5e798a9346cd7eb73c5563fcaa5e2469694c06881636cc09" Oct 11 05:26:38 crc kubenswrapper[4651]: I1011 05:26:38.007799 4651 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"1c23fc81a49b91fa5e798a9346cd7eb73c5563fcaa5e2469694c06881636cc09"} err="failed to get container status \"1c23fc81a49b91fa5e798a9346cd7eb73c5563fcaa5e2469694c06881636cc09\": rpc error: code = NotFound desc = could not find container \"1c23fc81a49b91fa5e798a9346cd7eb73c5563fcaa5e2469694c06881636cc09\": container with ID starting with 1c23fc81a49b91fa5e798a9346cd7eb73c5563fcaa5e2469694c06881636cc09 not found: ID does not exist" Oct 11 05:26:38 crc kubenswrapper[4651]: I1011 05:26:38.007876 4651 scope.go:117] "RemoveContainer" containerID="6504e09df3d88387121fa2495015654026948cb237502016cb5691e8dc7271f3" Oct 11 05:26:38 crc kubenswrapper[4651]: E1011 05:26:38.008447 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6504e09df3d88387121fa2495015654026948cb237502016cb5691e8dc7271f3\": container with ID starting with 6504e09df3d88387121fa2495015654026948cb237502016cb5691e8dc7271f3 not found: ID does not exist" containerID="6504e09df3d88387121fa2495015654026948cb237502016cb5691e8dc7271f3" Oct 11 05:26:38 crc kubenswrapper[4651]: I1011 05:26:38.008475 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6504e09df3d88387121fa2495015654026948cb237502016cb5691e8dc7271f3"} err="failed to get container status \"6504e09df3d88387121fa2495015654026948cb237502016cb5691e8dc7271f3\": rpc error: code = NotFound desc = could not find container \"6504e09df3d88387121fa2495015654026948cb237502016cb5691e8dc7271f3\": container with ID starting with 6504e09df3d88387121fa2495015654026948cb237502016cb5691e8dc7271f3 not found: ID does not exist" Oct 11 05:26:38 crc kubenswrapper[4651]: I1011 05:26:38.008501 4651 scope.go:117] "RemoveContainer" containerID="b07b316b7fb386a4f35f2825212dfb6f9fa132fdd6b0c9245ecc03d74dc6a152" Oct 11 05:26:38 crc kubenswrapper[4651]: E1011 05:26:38.008773 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b07b316b7fb386a4f35f2825212dfb6f9fa132fdd6b0c9245ecc03d74dc6a152\": container with ID starting with b07b316b7fb386a4f35f2825212dfb6f9fa132fdd6b0c9245ecc03d74dc6a152 not found: ID does not exist" containerID="b07b316b7fb386a4f35f2825212dfb6f9fa132fdd6b0c9245ecc03d74dc6a152" Oct 11 05:26:38 crc kubenswrapper[4651]: I1011 05:26:38.008842 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b07b316b7fb386a4f35f2825212dfb6f9fa132fdd6b0c9245ecc03d74dc6a152"} err="failed to get container status \"b07b316b7fb386a4f35f2825212dfb6f9fa132fdd6b0c9245ecc03d74dc6a152\": rpc error: code = NotFound desc = could not find container \"b07b316b7fb386a4f35f2825212dfb6f9fa132fdd6b0c9245ecc03d74dc6a152\": container with ID starting with b07b316b7fb386a4f35f2825212dfb6f9fa132fdd6b0c9245ecc03d74dc6a152 not found: ID does not exist" Oct 11 05:26:38 crc kubenswrapper[4651]: I1011 05:26:38.212267 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jn5lw"] Oct 11 05:26:38 crc kubenswrapper[4651]: I1011 05:26:38.219900 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-jn5lw"] Oct 11 05:26:39 crc kubenswrapper[4651]: I1011 05:26:39.225318 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rkh95" Oct 11 05:26:39 crc kubenswrapper[4651]: I1011 05:26:39.225719 4651 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rkh95" Oct 11 05:26:39 crc kubenswrapper[4651]: I1011 05:26:39.888663 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="378754e3-be29-406f-949b-0f3dd6b0f79c" path="/var/lib/kubelet/pods/378754e3-be29-406f-949b-0f3dd6b0f79c/volumes" Oct 11 05:26:40 crc kubenswrapper[4651]: I1011 05:26:40.281494 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-rkh95" podUID="5932a184-ad38-4fcc-bb34-e571ebd7a241" containerName="registry-server" probeResult="failure" output=< Oct 11 05:26:40 crc kubenswrapper[4651]: timeout: failed to connect service ":50051" within 1s Oct 11 05:26:40 crc kubenswrapper[4651]: > Oct 11 05:26:49 crc kubenswrapper[4651]: I1011 05:26:49.322209 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rkh95" Oct 11 05:26:49 crc kubenswrapper[4651]: I1011 05:26:49.408052 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rkh95" Oct 11 05:26:49 crc kubenswrapper[4651]: I1011 05:26:49.569139 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rkh95"] Oct 11 05:26:51 crc kubenswrapper[4651]: I1011 05:26:51.019199 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rkh95" podUID="5932a184-ad38-4fcc-bb34-e571ebd7a241" containerName="registry-server" containerID="cri-o://88e0941aba1d4f3b20f748258f6965dafb421836272148902194a98acf36a790" gracePeriod=2 Oct 11 05:26:51 crc kubenswrapper[4651]: I1011 05:26:51.546154 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rkh95" Oct 11 05:26:51 crc kubenswrapper[4651]: I1011 05:26:51.639065 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5932a184-ad38-4fcc-bb34-e571ebd7a241-utilities\") pod \"5932a184-ad38-4fcc-bb34-e571ebd7a241\" (UID: \"5932a184-ad38-4fcc-bb34-e571ebd7a241\") " Oct 11 05:26:51 crc kubenswrapper[4651]: I1011 05:26:51.639362 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcz5q\" (UniqueName: \"kubernetes.io/projected/5932a184-ad38-4fcc-bb34-e571ebd7a241-kube-api-access-xcz5q\") pod \"5932a184-ad38-4fcc-bb34-e571ebd7a241\" (UID: \"5932a184-ad38-4fcc-bb34-e571ebd7a241\") " Oct 11 05:26:51 crc kubenswrapper[4651]: I1011 05:26:51.639481 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5932a184-ad38-4fcc-bb34-e571ebd7a241-catalog-content\") pod \"5932a184-ad38-4fcc-bb34-e571ebd7a241\" (UID: \"5932a184-ad38-4fcc-bb34-e571ebd7a241\") " Oct 11 05:26:51 crc kubenswrapper[4651]: I1011 05:26:51.640745 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5932a184-ad38-4fcc-bb34-e571ebd7a241-utilities" (OuterVolumeSpecName: "utilities") pod "5932a184-ad38-4fcc-bb34-e571ebd7a241" (UID: "5932a184-ad38-4fcc-bb34-e571ebd7a241"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:26:51 crc kubenswrapper[4651]: I1011 05:26:51.648107 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5932a184-ad38-4fcc-bb34-e571ebd7a241-kube-api-access-xcz5q" (OuterVolumeSpecName: "kube-api-access-xcz5q") pod "5932a184-ad38-4fcc-bb34-e571ebd7a241" (UID: "5932a184-ad38-4fcc-bb34-e571ebd7a241"). InnerVolumeSpecName "kube-api-access-xcz5q". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:26:51 crc kubenswrapper[4651]: I1011 05:26:51.741768 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcz5q\" (UniqueName: \"kubernetes.io/projected/5932a184-ad38-4fcc-bb34-e571ebd7a241-kube-api-access-xcz5q\") on node \"crc\" DevicePath \"\"" Oct 11 05:26:51 crc kubenswrapper[4651]: I1011 05:26:51.741800 4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5932a184-ad38-4fcc-bb34-e571ebd7a241-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 05:26:51 crc kubenswrapper[4651]: I1011 05:26:51.760497 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5932a184-ad38-4fcc-bb34-e571ebd7a241-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5932a184-ad38-4fcc-bb34-e571ebd7a241" (UID: "5932a184-ad38-4fcc-bb34-e571ebd7a241"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:26:51 crc kubenswrapper[4651]: I1011 05:26:51.843000 4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5932a184-ad38-4fcc-bb34-e571ebd7a241-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 05:26:52 crc kubenswrapper[4651]: I1011 05:26:52.035415 4651 generic.go:334] "Generic (PLEG): container finished" podID="5932a184-ad38-4fcc-bb34-e571ebd7a241" containerID="88e0941aba1d4f3b20f748258f6965dafb421836272148902194a98acf36a790" exitCode=0 Oct 11 05:26:52 crc kubenswrapper[4651]: I1011 05:26:52.035482 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rkh95" event={"ID":"5932a184-ad38-4fcc-bb34-e571ebd7a241","Type":"ContainerDied","Data":"88e0941aba1d4f3b20f748258f6965dafb421836272148902194a98acf36a790"} Oct 11 05:26:52 crc kubenswrapper[4651]: I1011 05:26:52.035545 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rkh95" event={"ID":"5932a184-ad38-4fcc-bb34-e571ebd7a241","Type":"ContainerDied","Data":"fe27c469b13045a14cec308969a49f069452c97520f813a6aaca313eeebcf329"} Oct 11 05:26:52 crc kubenswrapper[4651]: I1011 05:26:52.035567 4651 scope.go:117] "RemoveContainer" containerID="88e0941aba1d4f3b20f748258f6965dafb421836272148902194a98acf36a790" Oct 11 05:26:52 crc kubenswrapper[4651]: I1011 05:26:52.035871 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rkh95" Oct 11 05:26:52 crc kubenswrapper[4651]: I1011 05:26:52.090615 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rkh95"] Oct 11 05:26:52 crc kubenswrapper[4651]: I1011 05:26:52.090949 4651 scope.go:117] "RemoveContainer" containerID="db8e19c6aab23d169143a015b659282dcf0c67c3876693691b4a5d48e0b37f8f" Oct 11 05:26:52 crc kubenswrapper[4651]: I1011 05:26:52.104137 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rkh95"] Oct 11 05:26:52 crc kubenswrapper[4651]: I1011 05:26:52.127127 4651 scope.go:117] "RemoveContainer" containerID="ae481a0b4ce50b822504912c9bcd9b35d5cff662cea0e352e78ddfc54104e8f7" Oct 11 05:26:52 crc kubenswrapper[4651]: I1011 05:26:52.175312 4651 scope.go:117] "RemoveContainer" containerID="88e0941aba1d4f3b20f748258f6965dafb421836272148902194a98acf36a790" Oct 11 05:26:52 crc kubenswrapper[4651]: E1011 05:26:52.175798 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88e0941aba1d4f3b20f748258f6965dafb421836272148902194a98acf36a790\": container with ID starting with 88e0941aba1d4f3b20f748258f6965dafb421836272148902194a98acf36a790 not found: ID does not exist" containerID="88e0941aba1d4f3b20f748258f6965dafb421836272148902194a98acf36a790" Oct 11 05:26:52 crc kubenswrapper[4651]: I1011 05:26:52.175852 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88e0941aba1d4f3b20f748258f6965dafb421836272148902194a98acf36a790"} err="failed to get container status \"88e0941aba1d4f3b20f748258f6965dafb421836272148902194a98acf36a790\": rpc error: code = NotFound desc = could not find container \"88e0941aba1d4f3b20f748258f6965dafb421836272148902194a98acf36a790\": container with ID starting with 88e0941aba1d4f3b20f748258f6965dafb421836272148902194a98acf36a790 not found: ID does not exist" Oct 11 05:26:52 crc kubenswrapper[4651]: I1011 05:26:52.175883 4651 scope.go:117] "RemoveContainer" containerID="db8e19c6aab23d169143a015b659282dcf0c67c3876693691b4a5d48e0b37f8f" Oct 11 05:26:52 crc kubenswrapper[4651]: E1011 05:26:52.176345 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db8e19c6aab23d169143a015b659282dcf0c67c3876693691b4a5d48e0b37f8f\": container with ID starting with db8e19c6aab23d169143a015b659282dcf0c67c3876693691b4a5d48e0b37f8f not found: ID does not exist" containerID="db8e19c6aab23d169143a015b659282dcf0c67c3876693691b4a5d48e0b37f8f" Oct 11 05:26:52 crc kubenswrapper[4651]: I1011 05:26:52.176381 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db8e19c6aab23d169143a015b659282dcf0c67c3876693691b4a5d48e0b37f8f"} err="failed to get container status \"db8e19c6aab23d169143a015b659282dcf0c67c3876693691b4a5d48e0b37f8f\": rpc error: code = NotFound desc = could not find container \"db8e19c6aab23d169143a015b659282dcf0c67c3876693691b4a5d48e0b37f8f\": container with ID starting with db8e19c6aab23d169143a015b659282dcf0c67c3876693691b4a5d48e0b37f8f not found: ID does not exist" Oct 11 05:26:52 crc kubenswrapper[4651]: I1011 05:26:52.176395 4651 scope.go:117] "RemoveContainer" containerID="ae481a0b4ce50b822504912c9bcd9b35d5cff662cea0e352e78ddfc54104e8f7" Oct 11 05:26:52 crc kubenswrapper[4651]: E1011 05:26:52.176998 4651 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"ae481a0b4ce50b822504912c9bcd9b35d5cff662cea0e352e78ddfc54104e8f7\": container with ID starting with ae481a0b4ce50b822504912c9bcd9b35d5cff662cea0e352e78ddfc54104e8f7 not found: ID does not exist" containerID="ae481a0b4ce50b822504912c9bcd9b35d5cff662cea0e352e78ddfc54104e8f7" Oct 11 05:26:52 crc kubenswrapper[4651]: I1011 05:26:52.177072 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae481a0b4ce50b822504912c9bcd9b35d5cff662cea0e352e78ddfc54104e8f7"} err="failed to get container status \"ae481a0b4ce50b822504912c9bcd9b35d5cff662cea0e352e78ddfc54104e8f7\": rpc error: code = NotFound desc = could not find container \"ae481a0b4ce50b822504912c9bcd9b35d5cff662cea0e352e78ddfc54104e8f7\": container with ID starting with ae481a0b4ce50b822504912c9bcd9b35d5cff662cea0e352e78ddfc54104e8f7 not found: ID does not exist" Oct 11 05:26:53 crc kubenswrapper[4651]: I1011 05:26:53.888453 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5932a184-ad38-4fcc-bb34-e571ebd7a241" path="/var/lib/kubelet/pods/5932a184-ad38-4fcc-bb34-e571ebd7a241/volumes" Oct 11 05:27:46 crc kubenswrapper[4651]: I1011 05:27:46.310370 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 05:27:46 crc kubenswrapper[4651]: I1011 05:27:46.311224 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 05:28:16 crc kubenswrapper[4651]: I1011 05:28:16.310896 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 05:28:16 crc kubenswrapper[4651]: I1011 05:28:16.311852 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 05:28:44 crc kubenswrapper[4651]: I1011 05:28:44.077617 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-9rfb6"] Oct 11 05:28:44 crc kubenswrapper[4651]: E1011 05:28:44.080565 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5932a184-ad38-4fcc-bb34-e571ebd7a241" containerName="extract-utilities" Oct 11 05:28:44 crc kubenswrapper[4651]: I1011 05:28:44.080582 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="5932a184-ad38-4fcc-bb34-e571ebd7a241" containerName="extract-utilities" Oct 11 05:28:44 crc kubenswrapper[4651]: E1011 05:28:44.080596 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="378754e3-be29-406f-949b-0f3dd6b0f79c" containerName="extract-content" Oct 11 05:28:44 crc kubenswrapper[4651]: I1011 05:28:44.080604 4651 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="378754e3-be29-406f-949b-0f3dd6b0f79c" containerName="extract-content" Oct 11 05:28:44 crc kubenswrapper[4651]: E1011 05:28:44.080624 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5932a184-ad38-4fcc-bb34-e571ebd7a241" containerName="extract-content" Oct 11 05:28:44 crc kubenswrapper[4651]: I1011 05:28:44.080630 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="5932a184-ad38-4fcc-bb34-e571ebd7a241" containerName="extract-content" Oct 11 05:28:44 crc kubenswrapper[4651]: E1011 05:28:44.080643 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="378754e3-be29-406f-949b-0f3dd6b0f79c" containerName="registry-server" Oct 11 05:28:44 crc kubenswrapper[4651]: I1011 05:28:44.080648 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="378754e3-be29-406f-949b-0f3dd6b0f79c" containerName="registry-server" Oct 11 05:28:44 crc kubenswrapper[4651]: E1011 05:28:44.080663 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="378754e3-be29-406f-949b-0f3dd6b0f79c" containerName="extract-utilities" Oct 11 05:28:44 crc kubenswrapper[4651]: I1011 05:28:44.080669 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="378754e3-be29-406f-949b-0f3dd6b0f79c" containerName="extract-utilities" Oct 11 05:28:44 crc kubenswrapper[4651]: E1011 05:28:44.080684 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5932a184-ad38-4fcc-bb34-e571ebd7a241" containerName="registry-server" Oct 11 05:28:44 crc kubenswrapper[4651]: I1011 05:28:44.080690 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="5932a184-ad38-4fcc-bb34-e571ebd7a241" containerName="registry-server" Oct 11 05:28:44 crc kubenswrapper[4651]: I1011 05:28:44.080879 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="5932a184-ad38-4fcc-bb34-e571ebd7a241" containerName="registry-server" Oct 11 05:28:44 crc kubenswrapper[4651]: I1011 05:28:44.080891 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="378754e3-be29-406f-949b-0f3dd6b0f79c" containerName="registry-server" Oct 11 05:28:44 crc kubenswrapper[4651]: I1011 05:28:44.082154 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-9rfb6" Oct 11 05:28:44 crc kubenswrapper[4651]: I1011 05:28:44.118163 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9rfb6"] Oct 11 05:28:44 crc kubenswrapper[4651]: I1011 05:28:44.281088 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84bdc669-7aff-462a-bf39-93e1635c76bf-utilities\") pod \"certified-operators-9rfb6\" (UID: \"84bdc669-7aff-462a-bf39-93e1635c76bf\") " pod="openshift-marketplace/certified-operators-9rfb6" Oct 11 05:28:44 crc kubenswrapper[4651]: I1011 05:28:44.281640 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84bdc669-7aff-462a-bf39-93e1635c76bf-catalog-content\") pod \"certified-operators-9rfb6\" (UID: \"84bdc669-7aff-462a-bf39-93e1635c76bf\") " pod="openshift-marketplace/certified-operators-9rfb6" Oct 11 05:28:44 crc kubenswrapper[4651]: I1011 05:28:44.281717 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9ngt\" (UniqueName: \"kubernetes.io/projected/84bdc669-7aff-462a-bf39-93e1635c76bf-kube-api-access-m9ngt\") pod \"certified-operators-9rfb6\" (UID: \"84bdc669-7aff-462a-bf39-93e1635c76bf\") " pod="openshift-marketplace/certified-operators-9rfb6" Oct 11 05:28:44 crc kubenswrapper[4651]: I1011 05:28:44.383758 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84bdc669-7aff-462a-bf39-93e1635c76bf-utilities\") pod \"certified-operators-9rfb6\" (UID: \"84bdc669-7aff-462a-bf39-93e1635c76bf\") " pod="openshift-marketplace/certified-operators-9rfb6" Oct 11 05:28:44 crc kubenswrapper[4651]: I1011 05:28:44.383883 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84bdc669-7aff-462a-bf39-93e1635c76bf-catalog-content\") pod \"certified-operators-9rfb6\" (UID: \"84bdc669-7aff-462a-bf39-93e1635c76bf\") " pod="openshift-marketplace/certified-operators-9rfb6" Oct 11 05:28:44 crc kubenswrapper[4651]: I1011 05:28:44.383907 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9ngt\" (UniqueName: \"kubernetes.io/projected/84bdc669-7aff-462a-bf39-93e1635c76bf-kube-api-access-m9ngt\") pod \"certified-operators-9rfb6\" (UID: \"84bdc669-7aff-462a-bf39-93e1635c76bf\") " pod="openshift-marketplace/certified-operators-9rfb6" Oct 11 05:28:44 crc kubenswrapper[4651]: I1011 05:28:44.384746 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84bdc669-7aff-462a-bf39-93e1635c76bf-utilities\") pod \"certified-operators-9rfb6\" (UID: \"84bdc669-7aff-462a-bf39-93e1635c76bf\") " pod="openshift-marketplace/certified-operators-9rfb6" Oct 11 05:28:44 crc kubenswrapper[4651]: I1011 05:28:44.384785 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84bdc669-7aff-462a-bf39-93e1635c76bf-catalog-content\") pod \"certified-operators-9rfb6\" (UID: \"84bdc669-7aff-462a-bf39-93e1635c76bf\") " pod="openshift-marketplace/certified-operators-9rfb6" Oct 11 05:28:44 crc kubenswrapper[4651]: I1011 05:28:44.425677 4651 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-m9ngt\" (UniqueName: \"kubernetes.io/projected/84bdc669-7aff-462a-bf39-93e1635c76bf-kube-api-access-m9ngt\") pod \"certified-operators-9rfb6\" (UID: \"84bdc669-7aff-462a-bf39-93e1635c76bf\") " pod="openshift-marketplace/certified-operators-9rfb6" Oct 11 05:28:44 crc kubenswrapper[4651]: I1011 05:28:44.716319 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9rfb6" Oct 11 05:28:45 crc kubenswrapper[4651]: I1011 05:28:45.233699 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9rfb6"] Oct 11 05:28:45 crc kubenswrapper[4651]: I1011 05:28:45.409413 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9rfb6" event={"ID":"84bdc669-7aff-462a-bf39-93e1635c76bf","Type":"ContainerStarted","Data":"e170b23cd9fa7775e87dd5b330ac578a2ae7bc2ab1f70f934bc6387b527a718f"} Oct 11 05:28:46 crc kubenswrapper[4651]: I1011 05:28:46.310270 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 05:28:46 crc kubenswrapper[4651]: I1011 05:28:46.310376 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 05:28:46 crc kubenswrapper[4651]: I1011 05:28:46.310448 4651 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" Oct 11 05:28:46 crc kubenswrapper[4651]: I1011 05:28:46.311512 4651 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"697c27631e7481f73fa15a7b6752c864f41b48d365447d86c85453a3dee57ea6"} pod="openshift-machine-config-operator/machine-config-daemon-78jnv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 11 05:28:46 crc kubenswrapper[4651]: I1011 05:28:46.311601 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" containerID="cri-o://697c27631e7481f73fa15a7b6752c864f41b48d365447d86c85453a3dee57ea6" gracePeriod=600 Oct 11 05:28:46 crc kubenswrapper[4651]: I1011 05:28:46.422162 4651 generic.go:334] "Generic (PLEG): container finished" podID="84bdc669-7aff-462a-bf39-93e1635c76bf" containerID="7722209e474032c6d834389de2a42fcc72f6728033b17d19a17045fc97a7af28" exitCode=0 Oct 11 05:28:46 crc kubenswrapper[4651]: I1011 05:28:46.422223 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9rfb6" event={"ID":"84bdc669-7aff-462a-bf39-93e1635c76bf","Type":"ContainerDied","Data":"7722209e474032c6d834389de2a42fcc72f6728033b17d19a17045fc97a7af28"} Oct 11 05:28:46 crc kubenswrapper[4651]: I1011 05:28:46.427393 4651 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 11 05:28:46 crc 
Oct 11 05:28:47 crc kubenswrapper[4651]: I1011 05:28:47.432668 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9rfb6" event={"ID":"84bdc669-7aff-462a-bf39-93e1635c76bf","Type":"ContainerStarted","Data":"e0e93e2fcef1a4c6086757396248ec7de10d5d1948324b37dede764d5aae3d27"}
Oct 11 05:28:47 crc kubenswrapper[4651]: I1011 05:28:47.435300 4651 generic.go:334] "Generic (PLEG): container finished" podID="519a1ae1-e964-48b0-8b61-835146df28c1" containerID="697c27631e7481f73fa15a7b6752c864f41b48d365447d86c85453a3dee57ea6" exitCode=0
Oct 11 05:28:47 crc kubenswrapper[4651]: I1011 05:28:47.435440 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" event={"ID":"519a1ae1-e964-48b0-8b61-835146df28c1","Type":"ContainerDied","Data":"697c27631e7481f73fa15a7b6752c864f41b48d365447d86c85453a3dee57ea6"}
Oct 11 05:28:47 crc kubenswrapper[4651]: I1011 05:28:47.435579 4651 scope.go:117] "RemoveContainer" containerID="5009eec8462e179c8ec2fb4c4e90392eec2df5ea9699e07187956c37e75e5cf8"
Oct 11 05:28:47 crc kubenswrapper[4651]: I1011 05:28:47.436119 4651 scope.go:117] "RemoveContainer" containerID="697c27631e7481f73fa15a7b6752c864f41b48d365447d86c85453a3dee57ea6"
Oct 11 05:28:47 crc kubenswrapper[4651]: E1011 05:28:47.436510 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1"
Oct 11 05:28:48 crc kubenswrapper[4651]: I1011 05:28:48.453046 4651 generic.go:334] "Generic (PLEG): container finished" podID="84bdc669-7aff-462a-bf39-93e1635c76bf" containerID="e0e93e2fcef1a4c6086757396248ec7de10d5d1948324b37dede764d5aae3d27" exitCode=0
Oct 11 05:28:48 crc kubenswrapper[4651]: I1011 05:28:48.453138 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9rfb6" event={"ID":"84bdc669-7aff-462a-bf39-93e1635c76bf","Type":"ContainerDied","Data":"e0e93e2fcef1a4c6086757396248ec7de10d5d1948324b37dede764d5aae3d27"}
Oct 11 05:28:49 crc kubenswrapper[4651]: I1011 05:28:49.467097 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9rfb6" event={"ID":"84bdc669-7aff-462a-bf39-93e1635c76bf","Type":"ContainerStarted","Data":"c12f6be58d802bf84b9d3a25c0d34d69d846f78ed62ce1caac9db51afabed637"}
Oct 11 05:28:49 crc kubenswrapper[4651]: I1011 05:28:49.494349 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9rfb6" podStartSLOduration=2.918203717 podStartE2EDuration="5.494329666s" podCreationTimestamp="2025-10-11 05:28:44 +0000 UTC" firstStartedPulling="2025-10-11 05:28:46.427016454 +0000 UTC m=+2247.323249290" lastFinishedPulling="2025-10-11 05:28:49.003142433 +0000 UTC m=+2249.899375239" observedRunningTime="2025-10-11 05:28:49.485033727 +0000 UTC m=+2250.381266533" watchObservedRunningTime="2025-10-11 05:28:49.494329666 +0000 UTC m=+2250.390562462"
Oct 11 05:28:54 crc kubenswrapper[4651]: I1011 05:28:54.717431 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-9rfb6"
Oct 11 05:28:54 crc kubenswrapper[4651]: I1011 05:28:54.718475 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-9rfb6"
Oct 11 05:28:54 crc kubenswrapper[4651]: I1011 05:28:54.789460 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-9rfb6"
Oct 11 05:28:55 crc kubenswrapper[4651]: I1011 05:28:55.640074 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9rfb6"
Oct 11 05:28:55 crc kubenswrapper[4651]: I1011 05:28:55.706999 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9rfb6"]
Oct 11 05:28:57 crc kubenswrapper[4651]: I1011 05:28:57.568909 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-9rfb6" podUID="84bdc669-7aff-462a-bf39-93e1635c76bf" containerName="registry-server" containerID="cri-o://c12f6be58d802bf84b9d3a25c0d34d69d846f78ed62ce1caac9db51afabed637" gracePeriod=2
Oct 11 05:28:58 crc kubenswrapper[4651]: I1011 05:28:58.087076 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9rfb6"
Oct 11 05:28:58 crc kubenswrapper[4651]: I1011 05:28:58.255974 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84bdc669-7aff-462a-bf39-93e1635c76bf-utilities\") pod \"84bdc669-7aff-462a-bf39-93e1635c76bf\" (UID: \"84bdc669-7aff-462a-bf39-93e1635c76bf\") "
Oct 11 05:28:58 crc kubenswrapper[4651]: I1011 05:28:58.256332 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84bdc669-7aff-462a-bf39-93e1635c76bf-catalog-content\") pod \"84bdc669-7aff-462a-bf39-93e1635c76bf\" (UID: \"84bdc669-7aff-462a-bf39-93e1635c76bf\") "
Oct 11 05:28:58 crc kubenswrapper[4651]: I1011 05:28:58.256392 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m9ngt\" (UniqueName: \"kubernetes.io/projected/84bdc669-7aff-462a-bf39-93e1635c76bf-kube-api-access-m9ngt\") pod \"84bdc669-7aff-462a-bf39-93e1635c76bf\" (UID: \"84bdc669-7aff-462a-bf39-93e1635c76bf\") "
Oct 11 05:28:58 crc kubenswrapper[4651]: I1011 05:28:58.257902 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84bdc669-7aff-462a-bf39-93e1635c76bf-utilities" (OuterVolumeSpecName: "utilities") pod "84bdc669-7aff-462a-bf39-93e1635c76bf" (UID: "84bdc669-7aff-462a-bf39-93e1635c76bf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
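[annotation] "Killing container with a grace period ... gracePeriod=2" above is the standard termination contract: SIGTERM first, SIGKILL only if the process outlives the grace window (the registry-server here exits cleanly with exitCode=0 well inside 2s). A sketch of that shape, with process handling simplified rather than CRI-O's real implementation:

package main

import (
	"fmt"
	"time"
)

// stop asks the workload to exit, then escalates after gracePeriod.
func stop(term, kill func(), exited <-chan struct{}, gracePeriod time.Duration) {
	term() // deliver SIGTERM
	select {
	case <-exited:
		fmt.Println("exited within grace period")
	case <-time.After(gracePeriod):
		kill() // deliver SIGKILL
		fmt.Println("grace period elapsed; killed")
	}
}

func main() {
	exited := make(chan struct{})
	go func() { time.Sleep(500 * time.Millisecond); close(exited) }()
	stop(
		func() { fmt.Println("SIGTERM registry-server") },
		func() { fmt.Println("SIGKILL registry-server") },
		exited, 2*time.Second)
}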
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:28:58 crc kubenswrapper[4651]: I1011 05:28:58.269799 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84bdc669-7aff-462a-bf39-93e1635c76bf-kube-api-access-m9ngt" (OuterVolumeSpecName: "kube-api-access-m9ngt") pod "84bdc669-7aff-462a-bf39-93e1635c76bf" (UID: "84bdc669-7aff-462a-bf39-93e1635c76bf"). InnerVolumeSpecName "kube-api-access-m9ngt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:28:58 crc kubenswrapper[4651]: I1011 05:28:58.323567 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84bdc669-7aff-462a-bf39-93e1635c76bf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "84bdc669-7aff-462a-bf39-93e1635c76bf" (UID: "84bdc669-7aff-462a-bf39-93e1635c76bf"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:28:58 crc kubenswrapper[4651]: I1011 05:28:58.359890 4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84bdc669-7aff-462a-bf39-93e1635c76bf-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 05:28:58 crc kubenswrapper[4651]: I1011 05:28:58.359927 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m9ngt\" (UniqueName: \"kubernetes.io/projected/84bdc669-7aff-462a-bf39-93e1635c76bf-kube-api-access-m9ngt\") on node \"crc\" DevicePath \"\"" Oct 11 05:28:58 crc kubenswrapper[4651]: I1011 05:28:58.359942 4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84bdc669-7aff-462a-bf39-93e1635c76bf-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 05:28:58 crc kubenswrapper[4651]: I1011 05:28:58.584334 4651 generic.go:334] "Generic (PLEG): container finished" podID="84bdc669-7aff-462a-bf39-93e1635c76bf" containerID="c12f6be58d802bf84b9d3a25c0d34d69d846f78ed62ce1caac9db51afabed637" exitCode=0 Oct 11 05:28:58 crc kubenswrapper[4651]: I1011 05:28:58.584390 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-9rfb6" Oct 11 05:28:58 crc kubenswrapper[4651]: I1011 05:28:58.585785 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9rfb6" event={"ID":"84bdc669-7aff-462a-bf39-93e1635c76bf","Type":"ContainerDied","Data":"c12f6be58d802bf84b9d3a25c0d34d69d846f78ed62ce1caac9db51afabed637"} Oct 11 05:28:58 crc kubenswrapper[4651]: I1011 05:28:58.585972 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9rfb6" event={"ID":"84bdc669-7aff-462a-bf39-93e1635c76bf","Type":"ContainerDied","Data":"e170b23cd9fa7775e87dd5b330ac578a2ae7bc2ab1f70f934bc6387b527a718f"} Oct 11 05:28:58 crc kubenswrapper[4651]: I1011 05:28:58.586041 4651 scope.go:117] "RemoveContainer" containerID="c12f6be58d802bf84b9d3a25c0d34d69d846f78ed62ce1caac9db51afabed637" Oct 11 05:28:58 crc kubenswrapper[4651]: I1011 05:28:58.629049 4651 scope.go:117] "RemoveContainer" containerID="e0e93e2fcef1a4c6086757396248ec7de10d5d1948324b37dede764d5aae3d27" Oct 11 05:28:58 crc kubenswrapper[4651]: I1011 05:28:58.650479 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9rfb6"] Oct 11 05:28:58 crc kubenswrapper[4651]: I1011 05:28:58.656968 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-9rfb6"] Oct 11 05:28:58 crc kubenswrapper[4651]: I1011 05:28:58.698683 4651 scope.go:117] "RemoveContainer" containerID="7722209e474032c6d834389de2a42fcc72f6728033b17d19a17045fc97a7af28" Oct 11 05:28:58 crc kubenswrapper[4651]: I1011 05:28:58.732012 4651 scope.go:117] "RemoveContainer" containerID="c12f6be58d802bf84b9d3a25c0d34d69d846f78ed62ce1caac9db51afabed637" Oct 11 05:28:58 crc kubenswrapper[4651]: E1011 05:28:58.732808 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c12f6be58d802bf84b9d3a25c0d34d69d846f78ed62ce1caac9db51afabed637\": container with ID starting with c12f6be58d802bf84b9d3a25c0d34d69d846f78ed62ce1caac9db51afabed637 not found: ID does not exist" containerID="c12f6be58d802bf84b9d3a25c0d34d69d846f78ed62ce1caac9db51afabed637" Oct 11 05:28:58 crc kubenswrapper[4651]: I1011 05:28:58.732952 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c12f6be58d802bf84b9d3a25c0d34d69d846f78ed62ce1caac9db51afabed637"} err="failed to get container status \"c12f6be58d802bf84b9d3a25c0d34d69d846f78ed62ce1caac9db51afabed637\": rpc error: code = NotFound desc = could not find container \"c12f6be58d802bf84b9d3a25c0d34d69d846f78ed62ce1caac9db51afabed637\": container with ID starting with c12f6be58d802bf84b9d3a25c0d34d69d846f78ed62ce1caac9db51afabed637 not found: ID does not exist" Oct 11 05:28:58 crc kubenswrapper[4651]: I1011 05:28:58.733025 4651 scope.go:117] "RemoveContainer" containerID="e0e93e2fcef1a4c6086757396248ec7de10d5d1948324b37dede764d5aae3d27" Oct 11 05:28:58 crc kubenswrapper[4651]: E1011 05:28:58.733804 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0e93e2fcef1a4c6086757396248ec7de10d5d1948324b37dede764d5aae3d27\": container with ID starting with e0e93e2fcef1a4c6086757396248ec7de10d5d1948324b37dede764d5aae3d27 not found: ID does not exist" containerID="e0e93e2fcef1a4c6086757396248ec7de10d5d1948324b37dede764d5aae3d27" Oct 11 05:28:58 crc kubenswrapper[4651]: I1011 05:28:58.733866 4651 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0e93e2fcef1a4c6086757396248ec7de10d5d1948324b37dede764d5aae3d27"} err="failed to get container status \"e0e93e2fcef1a4c6086757396248ec7de10d5d1948324b37dede764d5aae3d27\": rpc error: code = NotFound desc = could not find container \"e0e93e2fcef1a4c6086757396248ec7de10d5d1948324b37dede764d5aae3d27\": container with ID starting with e0e93e2fcef1a4c6086757396248ec7de10d5d1948324b37dede764d5aae3d27 not found: ID does not exist" Oct 11 05:28:58 crc kubenswrapper[4651]: I1011 05:28:58.733901 4651 scope.go:117] "RemoveContainer" containerID="7722209e474032c6d834389de2a42fcc72f6728033b17d19a17045fc97a7af28" Oct 11 05:28:58 crc kubenswrapper[4651]: E1011 05:28:58.734635 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7722209e474032c6d834389de2a42fcc72f6728033b17d19a17045fc97a7af28\": container with ID starting with 7722209e474032c6d834389de2a42fcc72f6728033b17d19a17045fc97a7af28 not found: ID does not exist" containerID="7722209e474032c6d834389de2a42fcc72f6728033b17d19a17045fc97a7af28" Oct 11 05:28:58 crc kubenswrapper[4651]: I1011 05:28:58.734673 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7722209e474032c6d834389de2a42fcc72f6728033b17d19a17045fc97a7af28"} err="failed to get container status \"7722209e474032c6d834389de2a42fcc72f6728033b17d19a17045fc97a7af28\": rpc error: code = NotFound desc = could not find container \"7722209e474032c6d834389de2a42fcc72f6728033b17d19a17045fc97a7af28\": container with ID starting with 7722209e474032c6d834389de2a42fcc72f6728033b17d19a17045fc97a7af28 not found: ID does not exist" Oct 11 05:28:59 crc kubenswrapper[4651]: I1011 05:28:59.889282 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84bdc669-7aff-462a-bf39-93e1635c76bf" path="/var/lib/kubelet/pods/84bdc669-7aff-462a-bf39-93e1635c76bf/volumes" Oct 11 05:29:00 crc kubenswrapper[4651]: I1011 05:29:00.870009 4651 scope.go:117] "RemoveContainer" containerID="697c27631e7481f73fa15a7b6752c864f41b48d365447d86c85453a3dee57ea6" Oct 11 05:29:00 crc kubenswrapper[4651]: E1011 05:29:00.870355 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:29:14 crc kubenswrapper[4651]: I1011 05:29:14.871770 4651 scope.go:117] "RemoveContainer" containerID="697c27631e7481f73fa15a7b6752c864f41b48d365447d86c85453a3dee57ea6" Oct 11 05:29:14 crc kubenswrapper[4651]: E1011 05:29:14.873536 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:29:29 crc kubenswrapper[4651]: I1011 05:29:29.883753 4651 scope.go:117] "RemoveContainer" containerID="697c27631e7481f73fa15a7b6752c864f41b48d365447d86c85453a3dee57ea6" Oct 11 05:29:29 crc 
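[annotation] The repeating "back-off 5m0s restarting failed container" errors above are the kubelet's container restart backoff at its ceiling: the delay doubles on each crash and is capped at 5m0s (the 10s initial delay and doubling factor are the kubelet's documented defaults, stated here as background rather than taken from this log). A sketch of that schedule:

package main

import (
	"fmt"
	"time"
)

// backoff returns the wait before the Nth restart: 10s doubling, capped at 5m.
func backoff(restarts int) time.Duration {
	d := 10 * time.Second
	for i := 0; i < restarts; i++ {
		d *= 2
		if d >= 5*time.Minute {
			return 5 * time.Minute // reported in the log as "back-off 5m0s"
		}
	}
	return d
}

func main() {
	for r := 0; r <= 6; r++ {
		fmt.Printf("restart %d -> wait %s\n", r, backoff(r))
	}
	// restart 0 -> 10s, 1 -> 20s, ... restart 5 and beyond -> 5m0s
}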
Oct 11 05:29:44 crc kubenswrapper[4651]: I1011 05:29:44.869283 4651 scope.go:117] "RemoveContainer" containerID="697c27631e7481f73fa15a7b6752c864f41b48d365447d86c85453a3dee57ea6"
Oct 11 05:29:44 crc kubenswrapper[4651]: E1011 05:29:44.870454 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1"
Oct 11 05:29:59 crc kubenswrapper[4651]: I1011 05:29:59.880079 4651 scope.go:117] "RemoveContainer" containerID="697c27631e7481f73fa15a7b6752c864f41b48d365447d86c85453a3dee57ea6"
Oct 11 05:29:59 crc kubenswrapper[4651]: E1011 05:29:59.881164 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1"
Oct 11 05:30:00 crc kubenswrapper[4651]: I1011 05:30:00.176736 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29336010-9lqt6"]
Oct 11 05:30:00 crc kubenswrapper[4651]: E1011 05:30:00.178537 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84bdc669-7aff-462a-bf39-93e1635c76bf" containerName="extract-utilities"
Oct 11 05:30:00 crc kubenswrapper[4651]: I1011 05:30:00.178814 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="84bdc669-7aff-462a-bf39-93e1635c76bf" containerName="extract-utilities"
Oct 11 05:30:00 crc kubenswrapper[4651]: E1011 05:30:00.179051 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84bdc669-7aff-462a-bf39-93e1635c76bf" containerName="extract-content"
Oct 11 05:30:00 crc kubenswrapper[4651]: I1011 05:30:00.179232 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="84bdc669-7aff-462a-bf39-93e1635c76bf" containerName="extract-content"
Oct 11 05:30:00 crc kubenswrapper[4651]: E1011 05:30:00.179445 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84bdc669-7aff-462a-bf39-93e1635c76bf" containerName="registry-server"
Oct 11 05:30:00 crc kubenswrapper[4651]: I1011 05:30:00.179641 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="84bdc669-7aff-462a-bf39-93e1635c76bf" containerName="registry-server"
Oct 11 05:30:00 crc kubenswrapper[4651]: I1011 05:30:00.180176 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="84bdc669-7aff-462a-bf39-93e1635c76bf" containerName="registry-server"
Oct 11 05:30:00 crc kubenswrapper[4651]: I1011 05:30:00.181342 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29336010-9lqt6"
Oct 11 05:30:00 crc kubenswrapper[4651]: I1011 05:30:00.183938 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Oct 11 05:30:00 crc kubenswrapper[4651]: I1011 05:30:00.184358 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Oct 11 05:30:00 crc kubenswrapper[4651]: I1011 05:30:00.208597 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29336010-9lqt6"]
Oct 11 05:30:00 crc kubenswrapper[4651]: I1011 05:30:00.270355 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c3c49084-83b4-4c97-96b6-00597dc9ae2a-config-volume\") pod \"collect-profiles-29336010-9lqt6\" (UID: \"c3c49084-83b4-4c97-96b6-00597dc9ae2a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29336010-9lqt6"
Oct 11 05:30:00 crc kubenswrapper[4651]: I1011 05:30:00.270423 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z5csj\" (UniqueName: \"kubernetes.io/projected/c3c49084-83b4-4c97-96b6-00597dc9ae2a-kube-api-access-z5csj\") pod \"collect-profiles-29336010-9lqt6\" (UID: \"c3c49084-83b4-4c97-96b6-00597dc9ae2a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29336010-9lqt6"
Oct 11 05:30:00 crc kubenswrapper[4651]: I1011 05:30:00.270555 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c3c49084-83b4-4c97-96b6-00597dc9ae2a-secret-volume\") pod \"collect-profiles-29336010-9lqt6\" (UID: \"c3c49084-83b4-4c97-96b6-00597dc9ae2a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29336010-9lqt6"
Oct 11 05:30:00 crc kubenswrapper[4651]: I1011 05:30:00.372316 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c3c49084-83b4-4c97-96b6-00597dc9ae2a-config-volume\") pod \"collect-profiles-29336010-9lqt6\" (UID: \"c3c49084-83b4-4c97-96b6-00597dc9ae2a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29336010-9lqt6"
Oct 11 05:30:00 crc kubenswrapper[4651]: I1011 05:30:00.372397 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z5csj\" (UniqueName: \"kubernetes.io/projected/c3c49084-83b4-4c97-96b6-00597dc9ae2a-kube-api-access-z5csj\") pod \"collect-profiles-29336010-9lqt6\" (UID: \"c3c49084-83b4-4c97-96b6-00597dc9ae2a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29336010-9lqt6"
Oct 11 05:30:00 crc kubenswrapper[4651]: I1011 05:30:00.372551 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c3c49084-83b4-4c97-96b6-00597dc9ae2a-secret-volume\") pod \"collect-profiles-29336010-9lqt6\" (UID: \"c3c49084-83b4-4c97-96b6-00597dc9ae2a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29336010-9lqt6"
Oct 11 05:30:00 crc kubenswrapper[4651]: I1011 05:30:00.373204 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c3c49084-83b4-4c97-96b6-00597dc9ae2a-config-volume\") pod \"collect-profiles-29336010-9lqt6\" (UID: \"c3c49084-83b4-4c97-96b6-00597dc9ae2a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29336010-9lqt6"
\"collect-profiles-29336010-9lqt6\" (UID: \"c3c49084-83b4-4c97-96b6-00597dc9ae2a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29336010-9lqt6" Oct 11 05:30:00 crc kubenswrapper[4651]: I1011 05:30:00.383797 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c3c49084-83b4-4c97-96b6-00597dc9ae2a-secret-volume\") pod \"collect-profiles-29336010-9lqt6\" (UID: \"c3c49084-83b4-4c97-96b6-00597dc9ae2a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29336010-9lqt6" Oct 11 05:30:00 crc kubenswrapper[4651]: I1011 05:30:00.388106 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z5csj\" (UniqueName: \"kubernetes.io/projected/c3c49084-83b4-4c97-96b6-00597dc9ae2a-kube-api-access-z5csj\") pod \"collect-profiles-29336010-9lqt6\" (UID: \"c3c49084-83b4-4c97-96b6-00597dc9ae2a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29336010-9lqt6" Oct 11 05:30:00 crc kubenswrapper[4651]: I1011 05:30:00.510324 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29336010-9lqt6" Oct 11 05:30:00 crc kubenswrapper[4651]: I1011 05:30:00.786390 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29336010-9lqt6"] Oct 11 05:30:01 crc kubenswrapper[4651]: I1011 05:30:01.333406 4651 generic.go:334] "Generic (PLEG): container finished" podID="c3c49084-83b4-4c97-96b6-00597dc9ae2a" containerID="b2458b32ea3a1bf906670d0656379c37fc0be79c064c73a3d4fbcc17a4abefd5" exitCode=0 Oct 11 05:30:01 crc kubenswrapper[4651]: I1011 05:30:01.333483 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29336010-9lqt6" event={"ID":"c3c49084-83b4-4c97-96b6-00597dc9ae2a","Type":"ContainerDied","Data":"b2458b32ea3a1bf906670d0656379c37fc0be79c064c73a3d4fbcc17a4abefd5"} Oct 11 05:30:01 crc kubenswrapper[4651]: I1011 05:30:01.333526 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29336010-9lqt6" event={"ID":"c3c49084-83b4-4c97-96b6-00597dc9ae2a","Type":"ContainerStarted","Data":"b8d79d5f36eafa0d72142ecdf6d3b41aaa471db54a471dc16d2c1e93360d523e"} Oct 11 05:30:02 crc kubenswrapper[4651]: I1011 05:30:02.670934 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29336010-9lqt6" Oct 11 05:30:02 crc kubenswrapper[4651]: I1011 05:30:02.717782 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c3c49084-83b4-4c97-96b6-00597dc9ae2a-config-volume\") pod \"c3c49084-83b4-4c97-96b6-00597dc9ae2a\" (UID: \"c3c49084-83b4-4c97-96b6-00597dc9ae2a\") " Oct 11 05:30:02 crc kubenswrapper[4651]: I1011 05:30:02.717913 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z5csj\" (UniqueName: \"kubernetes.io/projected/c3c49084-83b4-4c97-96b6-00597dc9ae2a-kube-api-access-z5csj\") pod \"c3c49084-83b4-4c97-96b6-00597dc9ae2a\" (UID: \"c3c49084-83b4-4c97-96b6-00597dc9ae2a\") " Oct 11 05:30:02 crc kubenswrapper[4651]: I1011 05:30:02.718099 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c3c49084-83b4-4c97-96b6-00597dc9ae2a-secret-volume\") pod \"c3c49084-83b4-4c97-96b6-00597dc9ae2a\" (UID: \"c3c49084-83b4-4c97-96b6-00597dc9ae2a\") " Oct 11 05:30:02 crc kubenswrapper[4651]: I1011 05:30:02.718769 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3c49084-83b4-4c97-96b6-00597dc9ae2a-config-volume" (OuterVolumeSpecName: "config-volume") pod "c3c49084-83b4-4c97-96b6-00597dc9ae2a" (UID: "c3c49084-83b4-4c97-96b6-00597dc9ae2a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:30:02 crc kubenswrapper[4651]: I1011 05:30:02.726040 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3c49084-83b4-4c97-96b6-00597dc9ae2a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c3c49084-83b4-4c97-96b6-00597dc9ae2a" (UID: "c3c49084-83b4-4c97-96b6-00597dc9ae2a"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:30:02 crc kubenswrapper[4651]: I1011 05:30:02.726089 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3c49084-83b4-4c97-96b6-00597dc9ae2a-kube-api-access-z5csj" (OuterVolumeSpecName: "kube-api-access-z5csj") pod "c3c49084-83b4-4c97-96b6-00597dc9ae2a" (UID: "c3c49084-83b4-4c97-96b6-00597dc9ae2a"). InnerVolumeSpecName "kube-api-access-z5csj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:30:02 crc kubenswrapper[4651]: I1011 05:30:02.820358 4651 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c3c49084-83b4-4c97-96b6-00597dc9ae2a-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 11 05:30:02 crc kubenswrapper[4651]: I1011 05:30:02.820399 4651 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c3c49084-83b4-4c97-96b6-00597dc9ae2a-config-volume\") on node \"crc\" DevicePath \"\"" Oct 11 05:30:02 crc kubenswrapper[4651]: I1011 05:30:02.820409 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z5csj\" (UniqueName: \"kubernetes.io/projected/c3c49084-83b4-4c97-96b6-00597dc9ae2a-kube-api-access-z5csj\") on node \"crc\" DevicePath \"\"" Oct 11 05:30:03 crc kubenswrapper[4651]: I1011 05:30:03.358134 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29336010-9lqt6" event={"ID":"c3c49084-83b4-4c97-96b6-00597dc9ae2a","Type":"ContainerDied","Data":"b8d79d5f36eafa0d72142ecdf6d3b41aaa471db54a471dc16d2c1e93360d523e"} Oct 11 05:30:03 crc kubenswrapper[4651]: I1011 05:30:03.358185 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b8d79d5f36eafa0d72142ecdf6d3b41aaa471db54a471dc16d2c1e93360d523e" Oct 11 05:30:03 crc kubenswrapper[4651]: I1011 05:30:03.358253 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29336010-9lqt6" Oct 11 05:30:03 crc kubenswrapper[4651]: I1011 05:30:03.766567 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335965-cqjjn"] Oct 11 05:30:03 crc kubenswrapper[4651]: I1011 05:30:03.774294 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335965-cqjjn"] Oct 11 05:30:03 crc kubenswrapper[4651]: I1011 05:30:03.885319 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b0ea38e2-3e31-4208-a918-2859626f0048" path="/var/lib/kubelet/pods/b0ea38e2-3e31-4208-a918-2859626f0048/volumes" Oct 11 05:30:11 crc kubenswrapper[4651]: I1011 05:30:11.870150 4651 scope.go:117] "RemoveContainer" containerID="697c27631e7481f73fa15a7b6752c864f41b48d365447d86c85453a3dee57ea6" Oct 11 05:30:11 crc kubenswrapper[4651]: E1011 05:30:11.871319 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:30:25 crc kubenswrapper[4651]: I1011 05:30:25.870532 4651 scope.go:117] "RemoveContainer" containerID="697c27631e7481f73fa15a7b6752c864f41b48d365447d86c85453a3dee57ea6" Oct 11 05:30:25 crc kubenswrapper[4651]: E1011 05:30:25.872020 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:30:34 crc kubenswrapper[4651]: I1011 05:30:34.961515 4651 scope.go:117] "RemoveContainer" containerID="99ab96f0ea2620e7a440e243341cdf40218cfd65fb90631d89fe8938009ebe36" Oct 11 05:30:36 crc kubenswrapper[4651]: I1011 05:30:36.869539 4651 scope.go:117] "RemoveContainer" containerID="697c27631e7481f73fa15a7b6752c864f41b48d365447d86c85453a3dee57ea6" Oct 11 05:30:36 crc kubenswrapper[4651]: E1011 05:30:36.870127 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:30:37 crc kubenswrapper[4651]: I1011 05:30:37.774882 4651 generic.go:334] "Generic (PLEG): container finished" podID="6d343a98-7fde-4f8c-995f-39a826aa5f12" containerID="c86f34ac49ce62f2a164d1c4c7cbe28d8e92d9c15b847946a22b837d73ec76a3" exitCode=0 Oct 11 05:30:37 crc kubenswrapper[4651]: I1011 05:30:37.775008 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk" event={"ID":"6d343a98-7fde-4f8c-995f-39a826aa5f12","Type":"ContainerDied","Data":"c86f34ac49ce62f2a164d1c4c7cbe28d8e92d9c15b847946a22b837d73ec76a3"} Oct 11 05:30:39 crc kubenswrapper[4651]: I1011 05:30:39.353102 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk" Oct 11 05:30:39 crc kubenswrapper[4651]: I1011 05:30:39.399072 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d343a98-7fde-4f8c-995f-39a826aa5f12-libvirt-combined-ca-bundle\") pod \"6d343a98-7fde-4f8c-995f-39a826aa5f12\" (UID: \"6d343a98-7fde-4f8c-995f-39a826aa5f12\") " Oct 11 05:30:39 crc kubenswrapper[4651]: I1011 05:30:39.399201 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d343a98-7fde-4f8c-995f-39a826aa5f12-inventory\") pod \"6d343a98-7fde-4f8c-995f-39a826aa5f12\" (UID: \"6d343a98-7fde-4f8c-995f-39a826aa5f12\") " Oct 11 05:30:39 crc kubenswrapper[4651]: I1011 05:30:39.399276 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d343a98-7fde-4f8c-995f-39a826aa5f12-ssh-key\") pod \"6d343a98-7fde-4f8c-995f-39a826aa5f12\" (UID: \"6d343a98-7fde-4f8c-995f-39a826aa5f12\") " Oct 11 05:30:39 crc kubenswrapper[4651]: I1011 05:30:39.399463 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p8hrk\" (UniqueName: \"kubernetes.io/projected/6d343a98-7fde-4f8c-995f-39a826aa5f12-kube-api-access-p8hrk\") pod \"6d343a98-7fde-4f8c-995f-39a826aa5f12\" (UID: \"6d343a98-7fde-4f8c-995f-39a826aa5f12\") " Oct 11 05:30:39 crc kubenswrapper[4651]: I1011 05:30:39.399558 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/6d343a98-7fde-4f8c-995f-39a826aa5f12-libvirt-secret-0\") pod \"6d343a98-7fde-4f8c-995f-39a826aa5f12\" (UID: \"6d343a98-7fde-4f8c-995f-39a826aa5f12\") " Oct 11 
Oct 11 05:30:39 crc kubenswrapper[4651]: I1011 05:30:39.408298 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d343a98-7fde-4f8c-995f-39a826aa5f12-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "6d343a98-7fde-4f8c-995f-39a826aa5f12" (UID: "6d343a98-7fde-4f8c-995f-39a826aa5f12"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:30:39 crc kubenswrapper[4651]: I1011 05:30:39.410104 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d343a98-7fde-4f8c-995f-39a826aa5f12-kube-api-access-p8hrk" (OuterVolumeSpecName: "kube-api-access-p8hrk") pod "6d343a98-7fde-4f8c-995f-39a826aa5f12" (UID: "6d343a98-7fde-4f8c-995f-39a826aa5f12"). InnerVolumeSpecName "kube-api-access-p8hrk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:30:39 crc kubenswrapper[4651]: I1011 05:30:39.431772 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d343a98-7fde-4f8c-995f-39a826aa5f12-inventory" (OuterVolumeSpecName: "inventory") pod "6d343a98-7fde-4f8c-995f-39a826aa5f12" (UID: "6d343a98-7fde-4f8c-995f-39a826aa5f12"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:30:39 crc kubenswrapper[4651]: I1011 05:30:39.436501 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d343a98-7fde-4f8c-995f-39a826aa5f12-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "6d343a98-7fde-4f8c-995f-39a826aa5f12" (UID: "6d343a98-7fde-4f8c-995f-39a826aa5f12"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:30:39 crc kubenswrapper[4651]: I1011 05:30:39.455543 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d343a98-7fde-4f8c-995f-39a826aa5f12-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6d343a98-7fde-4f8c-995f-39a826aa5f12" (UID: "6d343a98-7fde-4f8c-995f-39a826aa5f12"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 05:30:39 crc kubenswrapper[4651]: I1011 05:30:39.501906 4651 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d343a98-7fde-4f8c-995f-39a826aa5f12-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 11 05:30:39 crc kubenswrapper[4651]: I1011 05:30:39.501948 4651 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6d343a98-7fde-4f8c-995f-39a826aa5f12-inventory\") on node \"crc\" DevicePath \"\""
Oct 11 05:30:39 crc kubenswrapper[4651]: I1011 05:30:39.501960 4651 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6d343a98-7fde-4f8c-995f-39a826aa5f12-ssh-key\") on node \"crc\" DevicePath \"\""
Oct 11 05:30:39 crc kubenswrapper[4651]: I1011 05:30:39.501972 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p8hrk\" (UniqueName: \"kubernetes.io/projected/6d343a98-7fde-4f8c-995f-39a826aa5f12-kube-api-access-p8hrk\") on node \"crc\" DevicePath \"\""
Oct 11 05:30:39 crc kubenswrapper[4651]: I1011 05:30:39.501985 4651 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/6d343a98-7fde-4f8c-995f-39a826aa5f12-libvirt-secret-0\") on node \"crc\" DevicePath \"\""
Oct 11 05:30:39 crc kubenswrapper[4651]: I1011 05:30:39.810050 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk" event={"ID":"6d343a98-7fde-4f8c-995f-39a826aa5f12","Type":"ContainerDied","Data":"c935ba62dedd35cf1296c74cd78a672097cf59749ba5ccf554fcbe761dd084df"}
Oct 11 05:30:39 crc kubenswrapper[4651]: I1011 05:30:39.810104 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c935ba62dedd35cf1296c74cd78a672097cf59749ba5ccf554fcbe761dd084df"
Oct 11 05:30:39 crc kubenswrapper[4651]: I1011 05:30:39.810600 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk"
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk" Oct 11 05:30:39 crc kubenswrapper[4651]: I1011 05:30:39.955528 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg"] Oct 11 05:30:39 crc kubenswrapper[4651]: E1011 05:30:39.956006 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3c49084-83b4-4c97-96b6-00597dc9ae2a" containerName="collect-profiles" Oct 11 05:30:39 crc kubenswrapper[4651]: I1011 05:30:39.956018 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3c49084-83b4-4c97-96b6-00597dc9ae2a" containerName="collect-profiles" Oct 11 05:30:39 crc kubenswrapper[4651]: E1011 05:30:39.956036 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d343a98-7fde-4f8c-995f-39a826aa5f12" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 11 05:30:39 crc kubenswrapper[4651]: I1011 05:30:39.956042 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d343a98-7fde-4f8c-995f-39a826aa5f12" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 11 05:30:39 crc kubenswrapper[4651]: I1011 05:30:39.956199 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d343a98-7fde-4f8c-995f-39a826aa5f12" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 11 05:30:39 crc kubenswrapper[4651]: I1011 05:30:39.956221 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3c49084-83b4-4c97-96b6-00597dc9ae2a" containerName="collect-profiles" Oct 11 05:30:39 crc kubenswrapper[4651]: I1011 05:30:39.956874 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg" Oct 11 05:30:39 crc kubenswrapper[4651]: I1011 05:30:39.964444 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg"] Oct 11 05:30:39 crc kubenswrapper[4651]: I1011 05:30:39.968194 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Oct 11 05:30:39 crc kubenswrapper[4651]: I1011 05:30:39.968307 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r489p" Oct 11 05:30:39 crc kubenswrapper[4651]: I1011 05:30:39.968373 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 11 05:30:39 crc kubenswrapper[4651]: I1011 05:30:39.969005 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 11 05:30:39 crc kubenswrapper[4651]: I1011 05:30:39.969081 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Oct 11 05:30:39 crc kubenswrapper[4651]: I1011 05:30:39.969259 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 11 05:30:39 crc kubenswrapper[4651]: I1011 05:30:39.969288 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Oct 11 05:30:40 crc kubenswrapper[4651]: I1011 05:30:40.016887 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t4rtg\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " 
pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg" Oct 11 05:30:40 crc kubenswrapper[4651]: I1011 05:30:40.016993 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cj965\" (UniqueName: \"kubernetes.io/projected/c2b4841c-2ea3-464b-8147-a24437d0d079-kube-api-access-cj965\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t4rtg\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg" Oct 11 05:30:40 crc kubenswrapper[4651]: I1011 05:30:40.017030 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t4rtg\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg" Oct 11 05:30:40 crc kubenswrapper[4651]: I1011 05:30:40.017049 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t4rtg\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg" Oct 11 05:30:40 crc kubenswrapper[4651]: I1011 05:30:40.017112 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t4rtg\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg" Oct 11 05:30:40 crc kubenswrapper[4651]: I1011 05:30:40.017132 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t4rtg\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg" Oct 11 05:30:40 crc kubenswrapper[4651]: I1011 05:30:40.017165 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t4rtg\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg" Oct 11 05:30:40 crc kubenswrapper[4651]: I1011 05:30:40.017215 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t4rtg\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg" Oct 11 05:30:40 crc kubenswrapper[4651]: I1011 05:30:40.017239 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-inventory\") 
pod \"nova-edpm-deployment-openstack-edpm-ipam-t4rtg\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg" Oct 11 05:30:40 crc kubenswrapper[4651]: I1011 05:30:40.118788 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t4rtg\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg" Oct 11 05:30:40 crc kubenswrapper[4651]: I1011 05:30:40.118856 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t4rtg\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg" Oct 11 05:30:40 crc kubenswrapper[4651]: I1011 05:30:40.118935 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t4rtg\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg" Oct 11 05:30:40 crc kubenswrapper[4651]: I1011 05:30:40.118966 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t4rtg\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg" Oct 11 05:30:40 crc kubenswrapper[4651]: I1011 05:30:40.119010 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t4rtg\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg" Oct 11 05:30:40 crc kubenswrapper[4651]: I1011 05:30:40.119077 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t4rtg\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg" Oct 11 05:30:40 crc kubenswrapper[4651]: I1011 05:30:40.119112 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t4rtg\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg" Oct 11 05:30:40 crc kubenswrapper[4651]: I1011 05:30:40.119190 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t4rtg\" (UID: 
\"c2b4841c-2ea3-464b-8147-a24437d0d079\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg" Oct 11 05:30:40 crc kubenswrapper[4651]: I1011 05:30:40.119320 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cj965\" (UniqueName: \"kubernetes.io/projected/c2b4841c-2ea3-464b-8147-a24437d0d079-kube-api-access-cj965\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t4rtg\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg" Oct 11 05:30:40 crc kubenswrapper[4651]: I1011 05:30:40.120768 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t4rtg\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg" Oct 11 05:30:40 crc kubenswrapper[4651]: I1011 05:30:40.123886 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t4rtg\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg" Oct 11 05:30:40 crc kubenswrapper[4651]: I1011 05:30:40.124764 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t4rtg\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg" Oct 11 05:30:40 crc kubenswrapper[4651]: I1011 05:30:40.125146 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t4rtg\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg" Oct 11 05:30:40 crc kubenswrapper[4651]: I1011 05:30:40.125591 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t4rtg\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg" Oct 11 05:30:40 crc kubenswrapper[4651]: I1011 05:30:40.129137 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t4rtg\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg" Oct 11 05:30:40 crc kubenswrapper[4651]: I1011 05:30:40.130381 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t4rtg\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg" Oct 11 05:30:40 crc 
Oct 11 05:30:40 crc kubenswrapper[4651]: I1011 05:30:40.139749 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t4rtg\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg"
Oct 11 05:30:40 crc kubenswrapper[4651]: I1011 05:30:40.146805 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cj965\" (UniqueName: \"kubernetes.io/projected/c2b4841c-2ea3-464b-8147-a24437d0d079-kube-api-access-cj965\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t4rtg\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg"
Oct 11 05:30:40 crc kubenswrapper[4651]: I1011 05:30:40.292985 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg"
Oct 11 05:30:40 crc kubenswrapper[4651]: I1011 05:30:40.700649 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg"]
Oct 11 05:30:40 crc kubenswrapper[4651]: I1011 05:30:40.821211 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg" event={"ID":"c2b4841c-2ea3-464b-8147-a24437d0d079","Type":"ContainerStarted","Data":"6f4c6827654e8e8825d8b785c2ca26caa9673e13f2b4ba3c597540698183a50f"}
Oct 11 05:30:41 crc kubenswrapper[4651]: I1011 05:30:41.833335 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg" event={"ID":"c2b4841c-2ea3-464b-8147-a24437d0d079","Type":"ContainerStarted","Data":"e80b6331191cbce4db88f4c1921bdac95cd1f0dd618526c6922b4d7423604875"}
Oct 11 05:30:41 crc kubenswrapper[4651]: I1011 05:30:41.866079 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg" podStartSLOduration=2.3551230690000002 podStartE2EDuration="2.866053457s" podCreationTimestamp="2025-10-11 05:30:39 +0000 UTC" firstStartedPulling="2025-10-11 05:30:40.707129176 +0000 UTC m=+2361.603361972" lastFinishedPulling="2025-10-11 05:30:41.218059524 +0000 UTC m=+2362.114292360" observedRunningTime="2025-10-11 05:30:41.857970048 +0000 UTC m=+2362.754202854" watchObservedRunningTime="2025-10-11 05:30:41.866053457 +0000 UTC m=+2362.762286273"
Oct 11 05:30:48 crc kubenswrapper[4651]: I1011 05:30:48.870467 4651 scope.go:117] "RemoveContainer" containerID="697c27631e7481f73fa15a7b6752c864f41b48d365447d86c85453a3dee57ea6"
Oct 11 05:30:48 crc kubenswrapper[4651]: E1011 05:30:48.872409 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1"
Oct 11 05:31:02 crc kubenswrapper[4651]: I1011 05:31:02.870295 4651 scope.go:117] "RemoveContainer" containerID="697c27631e7481f73fa15a7b6752c864f41b48d365447d86c85453a3dee57ea6"
Oct 11 05:31:02 crc kubenswrapper[4651]: E1011 05:31:02.871432 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1"
Oct 11 05:31:14 crc kubenswrapper[4651]: I1011 05:31:14.870594 4651 scope.go:117] "RemoveContainer" containerID="697c27631e7481f73fa15a7b6752c864f41b48d365447d86c85453a3dee57ea6"
Oct 11 05:31:14 crc kubenswrapper[4651]: E1011 05:31:14.871740 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1"
Oct 11 05:31:28 crc kubenswrapper[4651]: I1011 05:31:28.870539 4651 scope.go:117] "RemoveContainer" containerID="697c27631e7481f73fa15a7b6752c864f41b48d365447d86c85453a3dee57ea6"
Oct 11 05:31:28 crc kubenswrapper[4651]: E1011 05:31:28.871923 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1"
Oct 11 05:31:42 crc kubenswrapper[4651]: I1011 05:31:42.869637 4651 scope.go:117] "RemoveContainer" containerID="697c27631e7481f73fa15a7b6752c864f41b48d365447d86c85453a3dee57ea6"
Oct 11 05:31:42 crc kubenswrapper[4651]: E1011 05:31:42.870717 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1"
Oct 11 05:31:53 crc kubenswrapper[4651]: I1011 05:31:53.870461 4651 scope.go:117] "RemoveContainer" containerID="697c27631e7481f73fa15a7b6752c864f41b48d365447d86c85453a3dee57ea6"
Oct 11 05:31:53 crc kubenswrapper[4651]: E1011 05:31:53.871482 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1"
Oct 11 05:32:05 crc kubenswrapper[4651]: I1011 05:32:05.869666 4651 scope.go:117] "RemoveContainer" containerID="697c27631e7481f73fa15a7b6752c864f41b48d365447d86c85453a3dee57ea6"
Oct 11 05:32:05 crc kubenswrapper[4651]: E1011 05:32:05.870717 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1"
Oct 11 05:32:20 crc kubenswrapper[4651]: I1011 05:32:20.869563 4651 scope.go:117] "RemoveContainer" containerID="697c27631e7481f73fa15a7b6752c864f41b48d365447d86c85453a3dee57ea6"
Oct 11 05:32:20 crc kubenswrapper[4651]: E1011 05:32:20.870757 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1"
Oct 11 05:32:33 crc kubenswrapper[4651]: I1011 05:32:33.873155 4651 scope.go:117] "RemoveContainer" containerID="697c27631e7481f73fa15a7b6752c864f41b48d365447d86c85453a3dee57ea6"
Oct 11 05:32:33 crc kubenswrapper[4651]: E1011 05:32:33.874341 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1"
Oct 11 05:32:47 crc kubenswrapper[4651]: I1011 05:32:47.870257 4651 scope.go:117] "RemoveContainer" containerID="697c27631e7481f73fa15a7b6752c864f41b48d365447d86c85453a3dee57ea6"
Oct 11 05:32:47 crc kubenswrapper[4651]: E1011 05:32:47.871594 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1"
Oct 11 05:33:02 crc kubenswrapper[4651]: I1011 05:33:02.869764 4651 scope.go:117] "RemoveContainer" containerID="697c27631e7481f73fa15a7b6752c864f41b48d365447d86c85453a3dee57ea6"
Oct 11 05:33:02 crc kubenswrapper[4651]: E1011 05:33:02.870603 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1"
Oct 11 05:33:13 crc kubenswrapper[4651]: I1011 05:33:13.869866 4651 scope.go:117] "RemoveContainer" containerID="697c27631e7481f73fa15a7b6752c864f41b48d365447d86c85453a3dee57ea6"
Oct 11 05:33:13 crc kubenswrapper[4651]: E1011 05:33:13.870932 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1"
Oct 11 05:33:24 crc kubenswrapper[4651]: I1011 05:33:24.873966 4651 scope.go:117] "RemoveContainer" containerID="697c27631e7481f73fa15a7b6752c864f41b48d365447d86c85453a3dee57ea6"
Oct 11 05:33:24 crc kubenswrapper[4651]: E1011 05:33:24.875067 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1"
Oct 11 05:33:35 crc kubenswrapper[4651]: I1011 05:33:35.870311 4651 scope.go:117] "RemoveContainer" containerID="697c27631e7481f73fa15a7b6752c864f41b48d365447d86c85453a3dee57ea6"
Oct 11 05:33:35 crc kubenswrapper[4651]: E1011 05:33:35.871875 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1"
Oct 11 05:33:48 crc kubenswrapper[4651]: I1011 05:33:48.870129 4651 scope.go:117] "RemoveContainer" containerID="697c27631e7481f73fa15a7b6752c864f41b48d365447d86c85453a3dee57ea6"
Oct 11 05:33:50 crc kubenswrapper[4651]: I1011 05:33:50.136414 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" event={"ID":"519a1ae1-e964-48b0-8b61-835146df28c1","Type":"ContainerStarted","Data":"628909be9c56f029477d2c09fa758e8bc50148135b9cfb5a07c99c0471644ba6"}
Oct 11 05:34:15 crc kubenswrapper[4651]: I1011 05:34:15.422260 4651 generic.go:334] "Generic (PLEG): container finished" podID="c2b4841c-2ea3-464b-8147-a24437d0d079" containerID="e80b6331191cbce4db88f4c1921bdac95cd1f0dd618526c6922b4d7423604875" exitCode=0
Oct 11 05:34:15 crc kubenswrapper[4651]: I1011 05:34:15.422455 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg" event={"ID":"c2b4841c-2ea3-464b-8147-a24437d0d079","Type":"ContainerDied","Data":"e80b6331191cbce4db88f4c1921bdac95cd1f0dd618526c6922b4d7423604875"}
Oct 11 05:34:16 crc kubenswrapper[4651]: I1011 05:34:16.940879 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg"
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.135564 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-cell1-compute-config-1\") pod \"c2b4841c-2ea3-464b-8147-a24437d0d079\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.135636 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cj965\" (UniqueName: \"kubernetes.io/projected/c2b4841c-2ea3-464b-8147-a24437d0d079-kube-api-access-cj965\") pod \"c2b4841c-2ea3-464b-8147-a24437d0d079\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.135697 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-cell1-compute-config-0\") pod \"c2b4841c-2ea3-464b-8147-a24437d0d079\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.135726 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-ssh-key\") pod \"c2b4841c-2ea3-464b-8147-a24437d0d079\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.135765 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-migration-ssh-key-0\") pod \"c2b4841c-2ea3-464b-8147-a24437d0d079\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.135803 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-combined-ca-bundle\") pod \"c2b4841c-2ea3-464b-8147-a24437d0d079\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.135881 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-extra-config-0\") pod \"c2b4841c-2ea3-464b-8147-a24437d0d079\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.135934 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-inventory\") pod \"c2b4841c-2ea3-464b-8147-a24437d0d079\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.135976 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-migration-ssh-key-1\") pod \"c2b4841c-2ea3-464b-8147-a24437d0d079\" (UID: \"c2b4841c-2ea3-464b-8147-a24437d0d079\") " Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.144899 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/c2b4841c-2ea3-464b-8147-a24437d0d079-kube-api-access-cj965" (OuterVolumeSpecName: "kube-api-access-cj965") pod "c2b4841c-2ea3-464b-8147-a24437d0d079" (UID: "c2b4841c-2ea3-464b-8147-a24437d0d079"). InnerVolumeSpecName "kube-api-access-cj965". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.149548 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "c2b4841c-2ea3-464b-8147-a24437d0d079" (UID: "c2b4841c-2ea3-464b-8147-a24437d0d079"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.166200 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "c2b4841c-2ea3-464b-8147-a24437d0d079" (UID: "c2b4841c-2ea3-464b-8147-a24437d0d079"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.166566 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-inventory" (OuterVolumeSpecName: "inventory") pod "c2b4841c-2ea3-464b-8147-a24437d0d079" (UID: "c2b4841c-2ea3-464b-8147-a24437d0d079"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.170129 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "c2b4841c-2ea3-464b-8147-a24437d0d079" (UID: "c2b4841c-2ea3-464b-8147-a24437d0d079"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.177570 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c2b4841c-2ea3-464b-8147-a24437d0d079" (UID: "c2b4841c-2ea3-464b-8147-a24437d0d079"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.182738 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "c2b4841c-2ea3-464b-8147-a24437d0d079" (UID: "c2b4841c-2ea3-464b-8147-a24437d0d079"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.187476 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "c2b4841c-2ea3-464b-8147-a24437d0d079" (UID: "c2b4841c-2ea3-464b-8147-a24437d0d079"). InnerVolumeSpecName "nova-migration-ssh-key-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.198778 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "c2b4841c-2ea3-464b-8147-a24437d0d079" (UID: "c2b4841c-2ea3-464b-8147-a24437d0d079"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.242947 4651 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.242986 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cj965\" (UniqueName: \"kubernetes.io/projected/c2b4841c-2ea3-464b-8147-a24437d0d079-kube-api-access-cj965\") on node \"crc\" DevicePath \"\"" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.242998 4651 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.243013 4651 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.243028 4651 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.243041 4651 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.243053 4651 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.243066 4651 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-inventory\") on node \"crc\" DevicePath \"\"" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.243077 4651 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/c2b4841c-2ea3-464b-8147-a24437d0d079-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.442101 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg" event={"ID":"c2b4841c-2ea3-464b-8147-a24437d0d079","Type":"ContainerDied","Data":"6f4c6827654e8e8825d8b785c2ca26caa9673e13f2b4ba3c597540698183a50f"} Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.442141 4651 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="6f4c6827654e8e8825d8b785c2ca26caa9673e13f2b4ba3c597540698183a50f" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.442218 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t4rtg" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.556874 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh"] Oct 11 05:34:17 crc kubenswrapper[4651]: E1011 05:34:17.557250 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2b4841c-2ea3-464b-8147-a24437d0d079" containerName="nova-edpm-deployment-openstack-edpm-ipam" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.557266 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2b4841c-2ea3-464b-8147-a24437d0d079" containerName="nova-edpm-deployment-openstack-edpm-ipam" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.557451 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2b4841c-2ea3-464b-8147-a24437d0d079" containerName="nova-edpm-deployment-openstack-edpm-ipam" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.558009 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.559607 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r489p" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.560267 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.560405 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.561036 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.563308 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.567969 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh"] Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.753172 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh\" (UID: \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.753458 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh\" (UID: \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.753539 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh\" (UID: \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.753765 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh\" (UID: \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.753929 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lnqr4\" (UniqueName: \"kubernetes.io/projected/352a8263-3fc8-49fc-bc0b-6b5671d02fde-kube-api-access-lnqr4\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh\" (UID: \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.754055 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh\" (UID: \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.754088 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh\" (UID: \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.856028 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh\" (UID: \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.856274 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lnqr4\" (UniqueName: \"kubernetes.io/projected/352a8263-3fc8-49fc-bc0b-6b5671d02fde-kube-api-access-lnqr4\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh\" (UID: \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.856435 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh\" (UID: \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh" Oct 11 
05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.856507 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh\" (UID: \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.856608 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh\" (UID: \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.856705 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh\" (UID: \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.856798 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh\" (UID: \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.861311 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh\" (UID: \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.862059 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh\" (UID: \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.863849 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh\" (UID: \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh" Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.863980 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh\" (UID: \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh" Oct 11 05:34:17 crc kubenswrapper[4651]: 
Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.864140 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh\" (UID: \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh"
Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.870212 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh\" (UID: \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh"
Oct 11 05:34:17 crc kubenswrapper[4651]: I1011 05:34:17.884792 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lnqr4\" (UniqueName: \"kubernetes.io/projected/352a8263-3fc8-49fc-bc0b-6b5671d02fde-kube-api-access-lnqr4\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh\" (UID: \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh"
Oct 11 05:34:18 crc kubenswrapper[4651]: I1011 05:34:18.173093 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh"
Oct 11 05:34:18 crc kubenswrapper[4651]: I1011 05:34:18.773422 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh"]
Oct 11 05:34:18 crc kubenswrapper[4651]: W1011 05:34:18.782855 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod352a8263_3fc8_49fc_bc0b_6b5671d02fde.slice/crio-e716843ea0bab24d3d60b23c04d0cdf1708f58e3d7bf64ef49772f4c3348f0c7 WatchSource:0}: Error finding container e716843ea0bab24d3d60b23c04d0cdf1708f58e3d7bf64ef49772f4c3348f0c7: Status 404 returned error can't find the container with id e716843ea0bab24d3d60b23c04d0cdf1708f58e3d7bf64ef49772f4c3348f0c7
Oct 11 05:34:18 crc kubenswrapper[4651]: I1011 05:34:18.787141 4651 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 11 05:34:19 crc kubenswrapper[4651]: I1011 05:34:19.462754 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh" event={"ID":"352a8263-3fc8-49fc-bc0b-6b5671d02fde","Type":"ContainerStarted","Data":"e716843ea0bab24d3d60b23c04d0cdf1708f58e3d7bf64ef49772f4c3348f0c7"}
Oct 11 05:34:20 crc kubenswrapper[4651]: I1011 05:34:20.493388 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh" event={"ID":"352a8263-3fc8-49fc-bc0b-6b5671d02fde","Type":"ContainerStarted","Data":"4badc43d817076d91a73b8cfe23a69666912ede12ac7f11132648d853132ae0c"}
Oct 11 05:34:20 crc kubenswrapper[4651]: I1011 05:34:20.523599 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh" podStartSLOduration=2.807886383 podStartE2EDuration="3.523581852s" podCreationTimestamp="2025-10-11 05:34:17 +0000 UTC" firstStartedPulling="2025-10-11 05:34:18.786683098 +0000 UTC m=+2579.682915924" lastFinishedPulling="2025-10-11 05:34:19.502378587 +0000 UTC m=+2580.398611393" observedRunningTime="2025-10-11 05:34:20.513680067 +0000 UTC m=+2581.409912863" watchObservedRunningTime="2025-10-11 05:34:20.523581852 +0000 UTC m=+2581.419814648"
Oct 11 05:35:39 crc kubenswrapper[4651]: I1011 05:35:39.305512 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-9zvxh"]
Oct 11 05:35:39 crc kubenswrapper[4651]: I1011 05:35:39.308163 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9zvxh"
Oct 11 05:35:39 crc kubenswrapper[4651]: I1011 05:35:39.319235 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9zvxh"]
Oct 11 05:35:39 crc kubenswrapper[4651]: I1011 05:35:39.493388 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6sfd\" (UniqueName: \"kubernetes.io/projected/ab048169-7b91-4f7f-ab36-caf6d9f906c8-kube-api-access-h6sfd\") pod \"redhat-marketplace-9zvxh\" (UID: \"ab048169-7b91-4f7f-ab36-caf6d9f906c8\") " pod="openshift-marketplace/redhat-marketplace-9zvxh"
Oct 11 05:35:39 crc kubenswrapper[4651]: I1011 05:35:39.494232 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab048169-7b91-4f7f-ab36-caf6d9f906c8-catalog-content\") pod \"redhat-marketplace-9zvxh\" (UID: \"ab048169-7b91-4f7f-ab36-caf6d9f906c8\") " pod="openshift-marketplace/redhat-marketplace-9zvxh"
Oct 11 05:35:39 crc kubenswrapper[4651]: I1011 05:35:39.494414 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab048169-7b91-4f7f-ab36-caf6d9f906c8-utilities\") pod \"redhat-marketplace-9zvxh\" (UID: \"ab048169-7b91-4f7f-ab36-caf6d9f906c8\") " pod="openshift-marketplace/redhat-marketplace-9zvxh"
Oct 11 05:35:39 crc kubenswrapper[4651]: I1011 05:35:39.596076 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6sfd\" (UniqueName: \"kubernetes.io/projected/ab048169-7b91-4f7f-ab36-caf6d9f906c8-kube-api-access-h6sfd\") pod \"redhat-marketplace-9zvxh\" (UID: \"ab048169-7b91-4f7f-ab36-caf6d9f906c8\") " pod="openshift-marketplace/redhat-marketplace-9zvxh"
Oct 11 05:35:39 crc kubenswrapper[4651]: I1011 05:35:39.596181 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab048169-7b91-4f7f-ab36-caf6d9f906c8-catalog-content\") pod \"redhat-marketplace-9zvxh\" (UID: \"ab048169-7b91-4f7f-ab36-caf6d9f906c8\") " pod="openshift-marketplace/redhat-marketplace-9zvxh"
Oct 11 05:35:39 crc kubenswrapper[4651]: I1011 05:35:39.596268 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab048169-7b91-4f7f-ab36-caf6d9f906c8-utilities\") pod \"redhat-marketplace-9zvxh\" (UID: \"ab048169-7b91-4f7f-ab36-caf6d9f906c8\") " pod="openshift-marketplace/redhat-marketplace-9zvxh"
Oct 11 05:35:39 crc kubenswrapper[4651]: I1011 05:35:39.596882 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab048169-7b91-4f7f-ab36-caf6d9f906c8-utilities\") pod \"redhat-marketplace-9zvxh\" (UID: \"ab048169-7b91-4f7f-ab36-caf6d9f906c8\") " pod="openshift-marketplace/redhat-marketplace-9zvxh"
Oct 11 05:35:39 crc kubenswrapper[4651]: I1011 05:35:39.596886 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab048169-7b91-4f7f-ab36-caf6d9f906c8-catalog-content\") pod \"redhat-marketplace-9zvxh\" (UID: \"ab048169-7b91-4f7f-ab36-caf6d9f906c8\") " pod="openshift-marketplace/redhat-marketplace-9zvxh"
Oct 11 05:35:39 crc kubenswrapper[4651]: I1011 05:35:39.618630 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6sfd\" (UniqueName: \"kubernetes.io/projected/ab048169-7b91-4f7f-ab36-caf6d9f906c8-kube-api-access-h6sfd\") pod \"redhat-marketplace-9zvxh\" (UID: \"ab048169-7b91-4f7f-ab36-caf6d9f906c8\") " pod="openshift-marketplace/redhat-marketplace-9zvxh"
Oct 11 05:35:39 crc kubenswrapper[4651]: I1011 05:35:39.633531 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9zvxh"
Oct 11 05:35:40 crc kubenswrapper[4651]: I1011 05:35:40.110317 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9zvxh"]
Oct 11 05:35:40 crc kubenswrapper[4651]: I1011 05:35:40.440401 4651 generic.go:334] "Generic (PLEG): container finished" podID="ab048169-7b91-4f7f-ab36-caf6d9f906c8" containerID="a90be96d66fe4b9e21a6b48bedd0d0ae8ffba03a4d31f48353e78e02469914a5" exitCode=0
Oct 11 05:35:40 crc kubenswrapper[4651]: I1011 05:35:40.440437 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9zvxh" event={"ID":"ab048169-7b91-4f7f-ab36-caf6d9f906c8","Type":"ContainerDied","Data":"a90be96d66fe4b9e21a6b48bedd0d0ae8ffba03a4d31f48353e78e02469914a5"}
Oct 11 05:35:40 crc kubenswrapper[4651]: I1011 05:35:40.440694 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9zvxh" event={"ID":"ab048169-7b91-4f7f-ab36-caf6d9f906c8","Type":"ContainerStarted","Data":"afc43cdad0765e98281ca045785713076b86245a56441c4da3eda1a2beb6162b"}
Oct 11 05:35:41 crc kubenswrapper[4651]: I1011 05:35:41.456552 4651 generic.go:334] "Generic (PLEG): container finished" podID="ab048169-7b91-4f7f-ab36-caf6d9f906c8" containerID="952fc01cc0c87b6f279d7f2163e93ddbd17879dd7e347fcde391229ba157ff4f" exitCode=0
Oct 11 05:35:41 crc kubenswrapper[4651]: I1011 05:35:41.456727 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9zvxh" event={"ID":"ab048169-7b91-4f7f-ab36-caf6d9f906c8","Type":"ContainerDied","Data":"952fc01cc0c87b6f279d7f2163e93ddbd17879dd7e347fcde391229ba157ff4f"}
Oct 11 05:35:42 crc kubenswrapper[4651]: I1011 05:35:42.466509 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9zvxh" event={"ID":"ab048169-7b91-4f7f-ab36-caf6d9f906c8","Type":"ContainerStarted","Data":"e4876b859a6751c4d817ff3b2f91a3a18276a1652ce997f0f9332ee824a2b8b1"}
m=+2663.387350195" Oct 11 05:35:49 crc kubenswrapper[4651]: I1011 05:35:49.634263 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-9zvxh" Oct 11 05:35:49 crc kubenswrapper[4651]: I1011 05:35:49.634805 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-9zvxh" Oct 11 05:35:49 crc kubenswrapper[4651]: I1011 05:35:49.691259 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-9zvxh" Oct 11 05:35:50 crc kubenswrapper[4651]: I1011 05:35:50.646968 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-9zvxh" Oct 11 05:35:50 crc kubenswrapper[4651]: I1011 05:35:50.756235 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9zvxh"] Oct 11 05:35:52 crc kubenswrapper[4651]: I1011 05:35:52.597468 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-9zvxh" podUID="ab048169-7b91-4f7f-ab36-caf6d9f906c8" containerName="registry-server" containerID="cri-o://e4876b859a6751c4d817ff3b2f91a3a18276a1652ce997f0f9332ee824a2b8b1" gracePeriod=2 Oct 11 05:35:53 crc kubenswrapper[4651]: I1011 05:35:53.060260 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9zvxh" Oct 11 05:35:53 crc kubenswrapper[4651]: I1011 05:35:53.224280 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h6sfd\" (UniqueName: \"kubernetes.io/projected/ab048169-7b91-4f7f-ab36-caf6d9f906c8-kube-api-access-h6sfd\") pod \"ab048169-7b91-4f7f-ab36-caf6d9f906c8\" (UID: \"ab048169-7b91-4f7f-ab36-caf6d9f906c8\") " Oct 11 05:35:53 crc kubenswrapper[4651]: I1011 05:35:53.224495 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab048169-7b91-4f7f-ab36-caf6d9f906c8-utilities\") pod \"ab048169-7b91-4f7f-ab36-caf6d9f906c8\" (UID: \"ab048169-7b91-4f7f-ab36-caf6d9f906c8\") " Oct 11 05:35:53 crc kubenswrapper[4651]: I1011 05:35:53.224535 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab048169-7b91-4f7f-ab36-caf6d9f906c8-catalog-content\") pod \"ab048169-7b91-4f7f-ab36-caf6d9f906c8\" (UID: \"ab048169-7b91-4f7f-ab36-caf6d9f906c8\") " Oct 11 05:35:53 crc kubenswrapper[4651]: I1011 05:35:53.230390 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab048169-7b91-4f7f-ab36-caf6d9f906c8-utilities" (OuterVolumeSpecName: "utilities") pod "ab048169-7b91-4f7f-ab36-caf6d9f906c8" (UID: "ab048169-7b91-4f7f-ab36-caf6d9f906c8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:35:53 crc kubenswrapper[4651]: I1011 05:35:53.231927 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab048169-7b91-4f7f-ab36-caf6d9f906c8-kube-api-access-h6sfd" (OuterVolumeSpecName: "kube-api-access-h6sfd") pod "ab048169-7b91-4f7f-ab36-caf6d9f906c8" (UID: "ab048169-7b91-4f7f-ab36-caf6d9f906c8"). InnerVolumeSpecName "kube-api-access-h6sfd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:35:53 crc kubenswrapper[4651]: I1011 05:35:53.244206 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab048169-7b91-4f7f-ab36-caf6d9f906c8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ab048169-7b91-4f7f-ab36-caf6d9f906c8" (UID: "ab048169-7b91-4f7f-ab36-caf6d9f906c8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:35:53 crc kubenswrapper[4651]: I1011 05:35:53.327143 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h6sfd\" (UniqueName: \"kubernetes.io/projected/ab048169-7b91-4f7f-ab36-caf6d9f906c8-kube-api-access-h6sfd\") on node \"crc\" DevicePath \"\"" Oct 11 05:35:53 crc kubenswrapper[4651]: I1011 05:35:53.327205 4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab048169-7b91-4f7f-ab36-caf6d9f906c8-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 05:35:53 crc kubenswrapper[4651]: I1011 05:35:53.327226 4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab048169-7b91-4f7f-ab36-caf6d9f906c8-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 05:35:53 crc kubenswrapper[4651]: I1011 05:35:53.615645 4651 generic.go:334] "Generic (PLEG): container finished" podID="ab048169-7b91-4f7f-ab36-caf6d9f906c8" containerID="e4876b859a6751c4d817ff3b2f91a3a18276a1652ce997f0f9332ee824a2b8b1" exitCode=0 Oct 11 05:35:53 crc kubenswrapper[4651]: I1011 05:35:53.616014 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9zvxh" event={"ID":"ab048169-7b91-4f7f-ab36-caf6d9f906c8","Type":"ContainerDied","Data":"e4876b859a6751c4d817ff3b2f91a3a18276a1652ce997f0f9332ee824a2b8b1"} Oct 11 05:35:53 crc kubenswrapper[4651]: I1011 05:35:53.616042 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9zvxh" event={"ID":"ab048169-7b91-4f7f-ab36-caf6d9f906c8","Type":"ContainerDied","Data":"afc43cdad0765e98281ca045785713076b86245a56441c4da3eda1a2beb6162b"} Oct 11 05:35:53 crc kubenswrapper[4651]: I1011 05:35:53.616061 4651 scope.go:117] "RemoveContainer" containerID="e4876b859a6751c4d817ff3b2f91a3a18276a1652ce997f0f9332ee824a2b8b1" Oct 11 05:35:53 crc kubenswrapper[4651]: I1011 05:35:53.616203 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9zvxh" Oct 11 05:35:53 crc kubenswrapper[4651]: I1011 05:35:53.657750 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9zvxh"] Oct 11 05:35:53 crc kubenswrapper[4651]: I1011 05:35:53.661249 4651 scope.go:117] "RemoveContainer" containerID="952fc01cc0c87b6f279d7f2163e93ddbd17879dd7e347fcde391229ba157ff4f" Oct 11 05:35:53 crc kubenswrapper[4651]: I1011 05:35:53.664437 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-9zvxh"] Oct 11 05:35:53 crc kubenswrapper[4651]: I1011 05:35:53.694113 4651 scope.go:117] "RemoveContainer" containerID="a90be96d66fe4b9e21a6b48bedd0d0ae8ffba03a4d31f48353e78e02469914a5" Oct 11 05:35:53 crc kubenswrapper[4651]: I1011 05:35:53.788700 4651 scope.go:117] "RemoveContainer" containerID="e4876b859a6751c4d817ff3b2f91a3a18276a1652ce997f0f9332ee824a2b8b1" Oct 11 05:35:53 crc kubenswrapper[4651]: E1011 05:35:53.789484 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4876b859a6751c4d817ff3b2f91a3a18276a1652ce997f0f9332ee824a2b8b1\": container with ID starting with e4876b859a6751c4d817ff3b2f91a3a18276a1652ce997f0f9332ee824a2b8b1 not found: ID does not exist" containerID="e4876b859a6751c4d817ff3b2f91a3a18276a1652ce997f0f9332ee824a2b8b1" Oct 11 05:35:53 crc kubenswrapper[4651]: I1011 05:35:53.789533 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4876b859a6751c4d817ff3b2f91a3a18276a1652ce997f0f9332ee824a2b8b1"} err="failed to get container status \"e4876b859a6751c4d817ff3b2f91a3a18276a1652ce997f0f9332ee824a2b8b1\": rpc error: code = NotFound desc = could not find container \"e4876b859a6751c4d817ff3b2f91a3a18276a1652ce997f0f9332ee824a2b8b1\": container with ID starting with e4876b859a6751c4d817ff3b2f91a3a18276a1652ce997f0f9332ee824a2b8b1 not found: ID does not exist" Oct 11 05:35:53 crc kubenswrapper[4651]: I1011 05:35:53.789557 4651 scope.go:117] "RemoveContainer" containerID="952fc01cc0c87b6f279d7f2163e93ddbd17879dd7e347fcde391229ba157ff4f" Oct 11 05:35:53 crc kubenswrapper[4651]: E1011 05:35:53.790228 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"952fc01cc0c87b6f279d7f2163e93ddbd17879dd7e347fcde391229ba157ff4f\": container with ID starting with 952fc01cc0c87b6f279d7f2163e93ddbd17879dd7e347fcde391229ba157ff4f not found: ID does not exist" containerID="952fc01cc0c87b6f279d7f2163e93ddbd17879dd7e347fcde391229ba157ff4f" Oct 11 05:35:53 crc kubenswrapper[4651]: I1011 05:35:53.790324 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"952fc01cc0c87b6f279d7f2163e93ddbd17879dd7e347fcde391229ba157ff4f"} err="failed to get container status \"952fc01cc0c87b6f279d7f2163e93ddbd17879dd7e347fcde391229ba157ff4f\": rpc error: code = NotFound desc = could not find container \"952fc01cc0c87b6f279d7f2163e93ddbd17879dd7e347fcde391229ba157ff4f\": container with ID starting with 952fc01cc0c87b6f279d7f2163e93ddbd17879dd7e347fcde391229ba157ff4f not found: ID does not exist" Oct 11 05:35:53 crc kubenswrapper[4651]: I1011 05:35:53.790359 4651 scope.go:117] "RemoveContainer" containerID="a90be96d66fe4b9e21a6b48bedd0d0ae8ffba03a4d31f48353e78e02469914a5" Oct 11 05:35:53 crc kubenswrapper[4651]: E1011 05:35:53.790868 4651 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"a90be96d66fe4b9e21a6b48bedd0d0ae8ffba03a4d31f48353e78e02469914a5\": container with ID starting with a90be96d66fe4b9e21a6b48bedd0d0ae8ffba03a4d31f48353e78e02469914a5 not found: ID does not exist" containerID="a90be96d66fe4b9e21a6b48bedd0d0ae8ffba03a4d31f48353e78e02469914a5" Oct 11 05:35:53 crc kubenswrapper[4651]: I1011 05:35:53.790910 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a90be96d66fe4b9e21a6b48bedd0d0ae8ffba03a4d31f48353e78e02469914a5"} err="failed to get container status \"a90be96d66fe4b9e21a6b48bedd0d0ae8ffba03a4d31f48353e78e02469914a5\": rpc error: code = NotFound desc = could not find container \"a90be96d66fe4b9e21a6b48bedd0d0ae8ffba03a4d31f48353e78e02469914a5\": container with ID starting with a90be96d66fe4b9e21a6b48bedd0d0ae8ffba03a4d31f48353e78e02469914a5 not found: ID does not exist" Oct 11 05:35:53 crc kubenswrapper[4651]: I1011 05:35:53.883500 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab048169-7b91-4f7f-ab36-caf6d9f906c8" path="/var/lib/kubelet/pods/ab048169-7b91-4f7f-ab36-caf6d9f906c8/volumes" Oct 11 05:36:16 crc kubenswrapper[4651]: I1011 05:36:16.310256 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 05:36:16 crc kubenswrapper[4651]: I1011 05:36:16.310903 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 05:36:46 crc kubenswrapper[4651]: I1011 05:36:46.313625 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 05:36:46 crc kubenswrapper[4651]: I1011 05:36:46.314447 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 05:36:53 crc kubenswrapper[4651]: I1011 05:36:53.956741 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vs2v6"] Oct 11 05:36:53 crc kubenswrapper[4651]: E1011 05:36:53.958220 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab048169-7b91-4f7f-ab36-caf6d9f906c8" containerName="extract-utilities" Oct 11 05:36:53 crc kubenswrapper[4651]: I1011 05:36:53.958247 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab048169-7b91-4f7f-ab36-caf6d9f906c8" containerName="extract-utilities" Oct 11 05:36:53 crc kubenswrapper[4651]: E1011 05:36:53.958279 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab048169-7b91-4f7f-ab36-caf6d9f906c8" containerName="extract-content" Oct 11 05:36:53 crc kubenswrapper[4651]: I1011 05:36:53.958291 4651 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="ab048169-7b91-4f7f-ab36-caf6d9f906c8" containerName="extract-content" Oct 11 05:36:53 crc kubenswrapper[4651]: E1011 05:36:53.958323 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab048169-7b91-4f7f-ab36-caf6d9f906c8" containerName="registry-server" Oct 11 05:36:53 crc kubenswrapper[4651]: I1011 05:36:53.958337 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab048169-7b91-4f7f-ab36-caf6d9f906c8" containerName="registry-server" Oct 11 05:36:53 crc kubenswrapper[4651]: I1011 05:36:53.958681 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab048169-7b91-4f7f-ab36-caf6d9f906c8" containerName="registry-server" Oct 11 05:36:53 crc kubenswrapper[4651]: I1011 05:36:53.962409 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vs2v6" Oct 11 05:36:53 crc kubenswrapper[4651]: I1011 05:36:53.972359 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vs2v6"] Oct 11 05:36:54 crc kubenswrapper[4651]: I1011 05:36:54.050547 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/819c85ef-b451-47ed-88f6-1790f362d446-utilities\") pod \"community-operators-vs2v6\" (UID: \"819c85ef-b451-47ed-88f6-1790f362d446\") " pod="openshift-marketplace/community-operators-vs2v6" Oct 11 05:36:54 crc kubenswrapper[4651]: I1011 05:36:54.050772 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnxg6\" (UniqueName: \"kubernetes.io/projected/819c85ef-b451-47ed-88f6-1790f362d446-kube-api-access-qnxg6\") pod \"community-operators-vs2v6\" (UID: \"819c85ef-b451-47ed-88f6-1790f362d446\") " pod="openshift-marketplace/community-operators-vs2v6" Oct 11 05:36:54 crc kubenswrapper[4651]: I1011 05:36:54.050844 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/819c85ef-b451-47ed-88f6-1790f362d446-catalog-content\") pod \"community-operators-vs2v6\" (UID: \"819c85ef-b451-47ed-88f6-1790f362d446\") " pod="openshift-marketplace/community-operators-vs2v6" Oct 11 05:36:54 crc kubenswrapper[4651]: I1011 05:36:54.153108 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnxg6\" (UniqueName: \"kubernetes.io/projected/819c85ef-b451-47ed-88f6-1790f362d446-kube-api-access-qnxg6\") pod \"community-operators-vs2v6\" (UID: \"819c85ef-b451-47ed-88f6-1790f362d446\") " pod="openshift-marketplace/community-operators-vs2v6" Oct 11 05:36:54 crc kubenswrapper[4651]: I1011 05:36:54.153202 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/819c85ef-b451-47ed-88f6-1790f362d446-catalog-content\") pod \"community-operators-vs2v6\" (UID: \"819c85ef-b451-47ed-88f6-1790f362d446\") " pod="openshift-marketplace/community-operators-vs2v6" Oct 11 05:36:54 crc kubenswrapper[4651]: I1011 05:36:54.153272 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/819c85ef-b451-47ed-88f6-1790f362d446-utilities\") pod \"community-operators-vs2v6\" (UID: \"819c85ef-b451-47ed-88f6-1790f362d446\") " pod="openshift-marketplace/community-operators-vs2v6" Oct 11 05:36:54 crc kubenswrapper[4651]: 
I1011 05:36:54.153721 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/819c85ef-b451-47ed-88f6-1790f362d446-catalog-content\") pod \"community-operators-vs2v6\" (UID: \"819c85ef-b451-47ed-88f6-1790f362d446\") " pod="openshift-marketplace/community-operators-vs2v6" Oct 11 05:36:54 crc kubenswrapper[4651]: I1011 05:36:54.153727 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/819c85ef-b451-47ed-88f6-1790f362d446-utilities\") pod \"community-operators-vs2v6\" (UID: \"819c85ef-b451-47ed-88f6-1790f362d446\") " pod="openshift-marketplace/community-operators-vs2v6" Oct 11 05:36:54 crc kubenswrapper[4651]: I1011 05:36:54.175171 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnxg6\" (UniqueName: \"kubernetes.io/projected/819c85ef-b451-47ed-88f6-1790f362d446-kube-api-access-qnxg6\") pod \"community-operators-vs2v6\" (UID: \"819c85ef-b451-47ed-88f6-1790f362d446\") " pod="openshift-marketplace/community-operators-vs2v6" Oct 11 05:36:54 crc kubenswrapper[4651]: I1011 05:36:54.335870 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vs2v6" Oct 11 05:36:54 crc kubenswrapper[4651]: I1011 05:36:54.973732 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vs2v6"] Oct 11 05:36:54 crc kubenswrapper[4651]: W1011 05:36:54.974613 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod819c85ef_b451_47ed_88f6_1790f362d446.slice/crio-14c782ad29e214224a10b29e27cc833b5f36282d305f7cd0ec5f03ccf95d1fd3 WatchSource:0}: Error finding container 14c782ad29e214224a10b29e27cc833b5f36282d305f7cd0ec5f03ccf95d1fd3: Status 404 returned error can't find the container with id 14c782ad29e214224a10b29e27cc833b5f36282d305f7cd0ec5f03ccf95d1fd3 Oct 11 05:36:55 crc kubenswrapper[4651]: I1011 05:36:55.375143 4651 generic.go:334] "Generic (PLEG): container finished" podID="352a8263-3fc8-49fc-bc0b-6b5671d02fde" containerID="4badc43d817076d91a73b8cfe23a69666912ede12ac7f11132648d853132ae0c" exitCode=0 Oct 11 05:36:55 crc kubenswrapper[4651]: I1011 05:36:55.375266 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh" event={"ID":"352a8263-3fc8-49fc-bc0b-6b5671d02fde","Type":"ContainerDied","Data":"4badc43d817076d91a73b8cfe23a69666912ede12ac7f11132648d853132ae0c"} Oct 11 05:36:55 crc kubenswrapper[4651]: I1011 05:36:55.379396 4651 generic.go:334] "Generic (PLEG): container finished" podID="819c85ef-b451-47ed-88f6-1790f362d446" containerID="ef00d17b465b0542ff17d2ec23dd2312a32d89fc7b900cb7a9ccdb5c8071816c" exitCode=0 Oct 11 05:36:55 crc kubenswrapper[4651]: I1011 05:36:55.379452 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vs2v6" event={"ID":"819c85ef-b451-47ed-88f6-1790f362d446","Type":"ContainerDied","Data":"ef00d17b465b0542ff17d2ec23dd2312a32d89fc7b900cb7a9ccdb5c8071816c"} Oct 11 05:36:55 crc kubenswrapper[4651]: I1011 05:36:55.379486 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vs2v6" event={"ID":"819c85ef-b451-47ed-88f6-1790f362d446","Type":"ContainerStarted","Data":"14c782ad29e214224a10b29e27cc833b5f36282d305f7cd0ec5f03ccf95d1fd3"} Oct 11 05:36:56 crc 
kubenswrapper[4651]: I1011 05:36:56.851758 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh" Oct 11 05:36:56 crc kubenswrapper[4651]: I1011 05:36:56.917235 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-ssh-key\") pod \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\" (UID: \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\") " Oct 11 05:36:56 crc kubenswrapper[4651]: I1011 05:36:56.917306 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lnqr4\" (UniqueName: \"kubernetes.io/projected/352a8263-3fc8-49fc-bc0b-6b5671d02fde-kube-api-access-lnqr4\") pod \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\" (UID: \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\") " Oct 11 05:36:56 crc kubenswrapper[4651]: I1011 05:36:56.917353 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-telemetry-combined-ca-bundle\") pod \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\" (UID: \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\") " Oct 11 05:36:56 crc kubenswrapper[4651]: I1011 05:36:56.917404 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-inventory\") pod \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\" (UID: \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\") " Oct 11 05:36:56 crc kubenswrapper[4651]: I1011 05:36:56.917479 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-ceilometer-compute-config-data-0\") pod \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\" (UID: \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\") " Oct 11 05:36:56 crc kubenswrapper[4651]: I1011 05:36:56.917534 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-ceilometer-compute-config-data-1\") pod \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\" (UID: \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\") " Oct 11 05:36:56 crc kubenswrapper[4651]: I1011 05:36:56.917572 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-ceilometer-compute-config-data-2\") pod \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\" (UID: \"352a8263-3fc8-49fc-bc0b-6b5671d02fde\") " Oct 11 05:36:56 crc kubenswrapper[4651]: I1011 05:36:56.923364 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/352a8263-3fc8-49fc-bc0b-6b5671d02fde-kube-api-access-lnqr4" (OuterVolumeSpecName: "kube-api-access-lnqr4") pod "352a8263-3fc8-49fc-bc0b-6b5671d02fde" (UID: "352a8263-3fc8-49fc-bc0b-6b5671d02fde"). InnerVolumeSpecName "kube-api-access-lnqr4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:36:56 crc kubenswrapper[4651]: I1011 05:36:56.923805 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "352a8263-3fc8-49fc-bc0b-6b5671d02fde" (UID: "352a8263-3fc8-49fc-bc0b-6b5671d02fde"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:36:56 crc kubenswrapper[4651]: I1011 05:36:56.946563 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "352a8263-3fc8-49fc-bc0b-6b5671d02fde" (UID: "352a8263-3fc8-49fc-bc0b-6b5671d02fde"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:36:56 crc kubenswrapper[4651]: I1011 05:36:56.948312 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "352a8263-3fc8-49fc-bc0b-6b5671d02fde" (UID: "352a8263-3fc8-49fc-bc0b-6b5671d02fde"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:36:56 crc kubenswrapper[4651]: I1011 05:36:56.949607 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "352a8263-3fc8-49fc-bc0b-6b5671d02fde" (UID: "352a8263-3fc8-49fc-bc0b-6b5671d02fde"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:36:56 crc kubenswrapper[4651]: I1011 05:36:56.954988 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "352a8263-3fc8-49fc-bc0b-6b5671d02fde" (UID: "352a8263-3fc8-49fc-bc0b-6b5671d02fde"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:36:56 crc kubenswrapper[4651]: I1011 05:36:56.955702 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-inventory" (OuterVolumeSpecName: "inventory") pod "352a8263-3fc8-49fc-bc0b-6b5671d02fde" (UID: "352a8263-3fc8-49fc-bc0b-6b5671d02fde"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:36:57 crc kubenswrapper[4651]: I1011 05:36:57.019259 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lnqr4\" (UniqueName: \"kubernetes.io/projected/352a8263-3fc8-49fc-bc0b-6b5671d02fde-kube-api-access-lnqr4\") on node \"crc\" DevicePath \"\"" Oct 11 05:36:57 crc kubenswrapper[4651]: I1011 05:36:57.019299 4651 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 05:36:57 crc kubenswrapper[4651]: I1011 05:36:57.019313 4651 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-inventory\") on node \"crc\" DevicePath \"\"" Oct 11 05:36:57 crc kubenswrapper[4651]: I1011 05:36:57.019325 4651 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Oct 11 05:36:57 crc kubenswrapper[4651]: I1011 05:36:57.019336 4651 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Oct 11 05:36:57 crc kubenswrapper[4651]: I1011 05:36:57.019347 4651 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Oct 11 05:36:57 crc kubenswrapper[4651]: I1011 05:36:57.019356 4651 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/352a8263-3fc8-49fc-bc0b-6b5671d02fde-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 11 05:36:57 crc kubenswrapper[4651]: I1011 05:36:57.397437 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh" event={"ID":"352a8263-3fc8-49fc-bc0b-6b5671d02fde","Type":"ContainerDied","Data":"e716843ea0bab24d3d60b23c04d0cdf1708f58e3d7bf64ef49772f4c3348f0c7"} Oct 11 05:36:57 crc kubenswrapper[4651]: I1011 05:36:57.397479 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e716843ea0bab24d3d60b23c04d0cdf1708f58e3d7bf64ef49772f4c3348f0c7" Oct 11 05:36:57 crc kubenswrapper[4651]: I1011 05:36:57.397568 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh" Oct 11 05:37:00 crc kubenswrapper[4651]: I1011 05:37:00.437014 4651 generic.go:334] "Generic (PLEG): container finished" podID="819c85ef-b451-47ed-88f6-1790f362d446" containerID="1efe45f5ba5658a58730be16b0ac08f06b4637f2c40588d5336b8f15be632733" exitCode=0 Oct 11 05:37:00 crc kubenswrapper[4651]: I1011 05:37:00.437434 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vs2v6" event={"ID":"819c85ef-b451-47ed-88f6-1790f362d446","Type":"ContainerDied","Data":"1efe45f5ba5658a58730be16b0ac08f06b4637f2c40588d5336b8f15be632733"} Oct 11 05:37:01 crc kubenswrapper[4651]: I1011 05:37:01.448518 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vs2v6" event={"ID":"819c85ef-b451-47ed-88f6-1790f362d446","Type":"ContainerStarted","Data":"9b07c1ef97f44006d564158e466df64d16df8c0b23ada0875187ba31543e9137"} Oct 11 05:37:01 crc kubenswrapper[4651]: I1011 05:37:01.474422 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vs2v6" podStartSLOduration=2.984526706 podStartE2EDuration="8.474404376s" podCreationTimestamp="2025-10-11 05:36:53 +0000 UTC" firstStartedPulling="2025-10-11 05:36:55.381488476 +0000 UTC m=+2736.277721312" lastFinishedPulling="2025-10-11 05:37:00.871366176 +0000 UTC m=+2741.767598982" observedRunningTime="2025-10-11 05:37:01.473684317 +0000 UTC m=+2742.369917153" watchObservedRunningTime="2025-10-11 05:37:01.474404376 +0000 UTC m=+2742.370637172" Oct 11 05:37:04 crc kubenswrapper[4651]: I1011 05:37:04.336378 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vs2v6" Oct 11 05:37:04 crc kubenswrapper[4651]: I1011 05:37:04.336461 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vs2v6" Oct 11 05:37:04 crc kubenswrapper[4651]: I1011 05:37:04.420507 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vs2v6" Oct 11 05:37:05 crc kubenswrapper[4651]: I1011 05:37:05.088577 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5zv8f"] Oct 11 05:37:05 crc kubenswrapper[4651]: E1011 05:37:05.089157 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="352a8263-3fc8-49fc-bc0b-6b5671d02fde" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Oct 11 05:37:05 crc kubenswrapper[4651]: I1011 05:37:05.089181 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="352a8263-3fc8-49fc-bc0b-6b5671d02fde" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Oct 11 05:37:05 crc kubenswrapper[4651]: I1011 05:37:05.089499 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="352a8263-3fc8-49fc-bc0b-6b5671d02fde" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Oct 11 05:37:05 crc kubenswrapper[4651]: I1011 05:37:05.092019 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5zv8f" Oct 11 05:37:05 crc kubenswrapper[4651]: I1011 05:37:05.104645 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5zv8f"] Oct 11 05:37:05 crc kubenswrapper[4651]: I1011 05:37:05.116346 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzxcz\" (UniqueName: \"kubernetes.io/projected/b5355026-b3ac-48d2-b93c-26319d272b62-kube-api-access-vzxcz\") pod \"redhat-operators-5zv8f\" (UID: \"b5355026-b3ac-48d2-b93c-26319d272b62\") " pod="openshift-marketplace/redhat-operators-5zv8f" Oct 11 05:37:05 crc kubenswrapper[4651]: I1011 05:37:05.116442 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5355026-b3ac-48d2-b93c-26319d272b62-catalog-content\") pod \"redhat-operators-5zv8f\" (UID: \"b5355026-b3ac-48d2-b93c-26319d272b62\") " pod="openshift-marketplace/redhat-operators-5zv8f" Oct 11 05:37:05 crc kubenswrapper[4651]: I1011 05:37:05.116544 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5355026-b3ac-48d2-b93c-26319d272b62-utilities\") pod \"redhat-operators-5zv8f\" (UID: \"b5355026-b3ac-48d2-b93c-26319d272b62\") " pod="openshift-marketplace/redhat-operators-5zv8f" Oct 11 05:37:05 crc kubenswrapper[4651]: I1011 05:37:05.218631 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5355026-b3ac-48d2-b93c-26319d272b62-utilities\") pod \"redhat-operators-5zv8f\" (UID: \"b5355026-b3ac-48d2-b93c-26319d272b62\") " pod="openshift-marketplace/redhat-operators-5zv8f" Oct 11 05:37:05 crc kubenswrapper[4651]: I1011 05:37:05.218860 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzxcz\" (UniqueName: \"kubernetes.io/projected/b5355026-b3ac-48d2-b93c-26319d272b62-kube-api-access-vzxcz\") pod \"redhat-operators-5zv8f\" (UID: \"b5355026-b3ac-48d2-b93c-26319d272b62\") " pod="openshift-marketplace/redhat-operators-5zv8f" Oct 11 05:37:05 crc kubenswrapper[4651]: I1011 05:37:05.218965 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5355026-b3ac-48d2-b93c-26319d272b62-catalog-content\") pod \"redhat-operators-5zv8f\" (UID: \"b5355026-b3ac-48d2-b93c-26319d272b62\") " pod="openshift-marketplace/redhat-operators-5zv8f" Oct 11 05:37:05 crc kubenswrapper[4651]: I1011 05:37:05.219362 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5355026-b3ac-48d2-b93c-26319d272b62-utilities\") pod \"redhat-operators-5zv8f\" (UID: \"b5355026-b3ac-48d2-b93c-26319d272b62\") " pod="openshift-marketplace/redhat-operators-5zv8f" Oct 11 05:37:05 crc kubenswrapper[4651]: I1011 05:37:05.219524 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5355026-b3ac-48d2-b93c-26319d272b62-catalog-content\") pod \"redhat-operators-5zv8f\" (UID: \"b5355026-b3ac-48d2-b93c-26319d272b62\") " pod="openshift-marketplace/redhat-operators-5zv8f" Oct 11 05:37:05 crc kubenswrapper[4651]: I1011 05:37:05.245972 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-vzxcz\" (UniqueName: \"kubernetes.io/projected/b5355026-b3ac-48d2-b93c-26319d272b62-kube-api-access-vzxcz\") pod \"redhat-operators-5zv8f\" (UID: \"b5355026-b3ac-48d2-b93c-26319d272b62\") " pod="openshift-marketplace/redhat-operators-5zv8f" Oct 11 05:37:05 crc kubenswrapper[4651]: I1011 05:37:05.417163 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5zv8f" Oct 11 05:37:05 crc kubenswrapper[4651]: I1011 05:37:05.898534 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5zv8f"] Oct 11 05:37:06 crc kubenswrapper[4651]: I1011 05:37:06.503590 4651 generic.go:334] "Generic (PLEG): container finished" podID="b5355026-b3ac-48d2-b93c-26319d272b62" containerID="377d0efbe249de94faaac1925969ab48468c15dfb2a9e7da027f4971c334fcf7" exitCode=0 Oct 11 05:37:06 crc kubenswrapper[4651]: I1011 05:37:06.503662 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5zv8f" event={"ID":"b5355026-b3ac-48d2-b93c-26319d272b62","Type":"ContainerDied","Data":"377d0efbe249de94faaac1925969ab48468c15dfb2a9e7da027f4971c334fcf7"} Oct 11 05:37:06 crc kubenswrapper[4651]: I1011 05:37:06.503701 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5zv8f" event={"ID":"b5355026-b3ac-48d2-b93c-26319d272b62","Type":"ContainerStarted","Data":"8acdde45db2d65bd15f489ff1c9f283646d5f30def7e123b65fcfaf2abbdd21d"} Oct 11 05:37:07 crc kubenswrapper[4651]: I1011 05:37:07.516660 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5zv8f" event={"ID":"b5355026-b3ac-48d2-b93c-26319d272b62","Type":"ContainerStarted","Data":"db904ed300dc9216252e2e7034d6db69fa9c859f2148b44c716a5aea28c60a45"} Oct 11 05:37:09 crc kubenswrapper[4651]: I1011 05:37:09.547240 4651 generic.go:334] "Generic (PLEG): container finished" podID="b5355026-b3ac-48d2-b93c-26319d272b62" containerID="db904ed300dc9216252e2e7034d6db69fa9c859f2148b44c716a5aea28c60a45" exitCode=0 Oct 11 05:37:09 crc kubenswrapper[4651]: I1011 05:37:09.547308 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5zv8f" event={"ID":"b5355026-b3ac-48d2-b93c-26319d272b62","Type":"ContainerDied","Data":"db904ed300dc9216252e2e7034d6db69fa9c859f2148b44c716a5aea28c60a45"} Oct 11 05:37:10 crc kubenswrapper[4651]: I1011 05:37:10.559535 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5zv8f" event={"ID":"b5355026-b3ac-48d2-b93c-26319d272b62","Type":"ContainerStarted","Data":"1681f2968f2fee4861cb754136d71333f73451a9b5c5d1ed9de3409c191ac68e"} Oct 11 05:37:10 crc kubenswrapper[4651]: I1011 05:37:10.581922 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5zv8f" podStartSLOduration=2.121460248 podStartE2EDuration="5.581903606s" podCreationTimestamp="2025-10-11 05:37:05 +0000 UTC" firstStartedPulling="2025-10-11 05:37:06.506836298 +0000 UTC m=+2747.403069104" lastFinishedPulling="2025-10-11 05:37:09.967279626 +0000 UTC m=+2750.863512462" observedRunningTime="2025-10-11 05:37:10.575340101 +0000 UTC m=+2751.471572917" watchObservedRunningTime="2025-10-11 05:37:10.581903606 +0000 UTC m=+2751.478136412" Oct 11 05:37:14 crc kubenswrapper[4651]: I1011 05:37:14.425290 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vs2v6" 
Oct 11 05:37:14 crc kubenswrapper[4651]: I1011 05:37:14.538080 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vs2v6"] Oct 11 05:37:14 crc kubenswrapper[4651]: I1011 05:37:14.606712 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5tgq7"] Oct 11 05:37:14 crc kubenswrapper[4651]: I1011 05:37:14.607192 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-5tgq7" podUID="a3f42662-a802-49b3-bdb2-ad9746e0f0e7" containerName="registry-server" containerID="cri-o://6ab7e7fae09b706cf3825cf2d5f1396ca7c39d0f82a91bf2632709ec7452f6a9" gracePeriod=2 Oct 11 05:37:15 crc kubenswrapper[4651]: I1011 05:37:15.094672 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5tgq7" Oct 11 05:37:15 crc kubenswrapper[4651]: I1011 05:37:15.235611 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ndp6g\" (UniqueName: \"kubernetes.io/projected/a3f42662-a802-49b3-bdb2-ad9746e0f0e7-kube-api-access-ndp6g\") pod \"a3f42662-a802-49b3-bdb2-ad9746e0f0e7\" (UID: \"a3f42662-a802-49b3-bdb2-ad9746e0f0e7\") " Oct 11 05:37:15 crc kubenswrapper[4651]: I1011 05:37:15.235758 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3f42662-a802-49b3-bdb2-ad9746e0f0e7-catalog-content\") pod \"a3f42662-a802-49b3-bdb2-ad9746e0f0e7\" (UID: \"a3f42662-a802-49b3-bdb2-ad9746e0f0e7\") " Oct 11 05:37:15 crc kubenswrapper[4651]: I1011 05:37:15.235903 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3f42662-a802-49b3-bdb2-ad9746e0f0e7-utilities\") pod \"a3f42662-a802-49b3-bdb2-ad9746e0f0e7\" (UID: \"a3f42662-a802-49b3-bdb2-ad9746e0f0e7\") " Oct 11 05:37:15 crc kubenswrapper[4651]: I1011 05:37:15.236486 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3f42662-a802-49b3-bdb2-ad9746e0f0e7-utilities" (OuterVolumeSpecName: "utilities") pod "a3f42662-a802-49b3-bdb2-ad9746e0f0e7" (UID: "a3f42662-a802-49b3-bdb2-ad9746e0f0e7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:37:15 crc kubenswrapper[4651]: I1011 05:37:15.236973 4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3f42662-a802-49b3-bdb2-ad9746e0f0e7-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 05:37:15 crc kubenswrapper[4651]: I1011 05:37:15.251995 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3f42662-a802-49b3-bdb2-ad9746e0f0e7-kube-api-access-ndp6g" (OuterVolumeSpecName: "kube-api-access-ndp6g") pod "a3f42662-a802-49b3-bdb2-ad9746e0f0e7" (UID: "a3f42662-a802-49b3-bdb2-ad9746e0f0e7"). InnerVolumeSpecName "kube-api-access-ndp6g". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:37:15 crc kubenswrapper[4651]: I1011 05:37:15.285225 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3f42662-a802-49b3-bdb2-ad9746e0f0e7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a3f42662-a802-49b3-bdb2-ad9746e0f0e7" (UID: "a3f42662-a802-49b3-bdb2-ad9746e0f0e7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:37:15 crc kubenswrapper[4651]: I1011 05:37:15.338585 4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3f42662-a802-49b3-bdb2-ad9746e0f0e7-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 05:37:15 crc kubenswrapper[4651]: I1011 05:37:15.338626 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ndp6g\" (UniqueName: \"kubernetes.io/projected/a3f42662-a802-49b3-bdb2-ad9746e0f0e7-kube-api-access-ndp6g\") on node \"crc\" DevicePath \"\"" Oct 11 05:37:15 crc kubenswrapper[4651]: I1011 05:37:15.417657 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5zv8f" Oct 11 05:37:15 crc kubenswrapper[4651]: I1011 05:37:15.417696 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5zv8f" Oct 11 05:37:15 crc kubenswrapper[4651]: I1011 05:37:15.644416 4651 generic.go:334] "Generic (PLEG): container finished" podID="a3f42662-a802-49b3-bdb2-ad9746e0f0e7" containerID="6ab7e7fae09b706cf3825cf2d5f1396ca7c39d0f82a91bf2632709ec7452f6a9" exitCode=0 Oct 11 05:37:15 crc kubenswrapper[4651]: I1011 05:37:15.644472 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5tgq7" event={"ID":"a3f42662-a802-49b3-bdb2-ad9746e0f0e7","Type":"ContainerDied","Data":"6ab7e7fae09b706cf3825cf2d5f1396ca7c39d0f82a91bf2632709ec7452f6a9"} Oct 11 05:37:15 crc kubenswrapper[4651]: I1011 05:37:15.644513 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5tgq7" event={"ID":"a3f42662-a802-49b3-bdb2-ad9746e0f0e7","Type":"ContainerDied","Data":"6835b3b96f51d3e182f4fbca21cc06b8ac014e1db2531bc2367850cd548a3ec5"} Oct 11 05:37:15 crc kubenswrapper[4651]: I1011 05:37:15.644529 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5tgq7" Oct 11 05:37:15 crc kubenswrapper[4651]: I1011 05:37:15.644537 4651 scope.go:117] "RemoveContainer" containerID="6ab7e7fae09b706cf3825cf2d5f1396ca7c39d0f82a91bf2632709ec7452f6a9" Oct 11 05:37:15 crc kubenswrapper[4651]: I1011 05:37:15.686473 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5tgq7"] Oct 11 05:37:15 crc kubenswrapper[4651]: I1011 05:37:15.691624 4651 scope.go:117] "RemoveContainer" containerID="fe727a964bdfa867705d5eb9aef73f5e65486805a0b20502c009d048a61f9578" Oct 11 05:37:15 crc kubenswrapper[4651]: I1011 05:37:15.697350 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-5tgq7"] Oct 11 05:37:15 crc kubenswrapper[4651]: I1011 05:37:15.732528 4651 scope.go:117] "RemoveContainer" containerID="a635ffd469e772c270979f38d96c66c7c5eade5afda1a16a2439106046712438" Oct 11 05:37:15 crc kubenswrapper[4651]: I1011 05:37:15.764155 4651 scope.go:117] "RemoveContainer" containerID="6ab7e7fae09b706cf3825cf2d5f1396ca7c39d0f82a91bf2632709ec7452f6a9" Oct 11 05:37:15 crc kubenswrapper[4651]: E1011 05:37:15.765601 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ab7e7fae09b706cf3825cf2d5f1396ca7c39d0f82a91bf2632709ec7452f6a9\": container with ID starting with 6ab7e7fae09b706cf3825cf2d5f1396ca7c39d0f82a91bf2632709ec7452f6a9 not found: ID does not exist" containerID="6ab7e7fae09b706cf3825cf2d5f1396ca7c39d0f82a91bf2632709ec7452f6a9" Oct 11 05:37:15 crc kubenswrapper[4651]: I1011 05:37:15.765716 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ab7e7fae09b706cf3825cf2d5f1396ca7c39d0f82a91bf2632709ec7452f6a9"} err="failed to get container status \"6ab7e7fae09b706cf3825cf2d5f1396ca7c39d0f82a91bf2632709ec7452f6a9\": rpc error: code = NotFound desc = could not find container \"6ab7e7fae09b706cf3825cf2d5f1396ca7c39d0f82a91bf2632709ec7452f6a9\": container with ID starting with 6ab7e7fae09b706cf3825cf2d5f1396ca7c39d0f82a91bf2632709ec7452f6a9 not found: ID does not exist" Oct 11 05:37:15 crc kubenswrapper[4651]: I1011 05:37:15.765940 4651 scope.go:117] "RemoveContainer" containerID="fe727a964bdfa867705d5eb9aef73f5e65486805a0b20502c009d048a61f9578" Oct 11 05:37:15 crc kubenswrapper[4651]: E1011 05:37:15.768732 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe727a964bdfa867705d5eb9aef73f5e65486805a0b20502c009d048a61f9578\": container with ID starting with fe727a964bdfa867705d5eb9aef73f5e65486805a0b20502c009d048a61f9578 not found: ID does not exist" containerID="fe727a964bdfa867705d5eb9aef73f5e65486805a0b20502c009d048a61f9578" Oct 11 05:37:15 crc kubenswrapper[4651]: I1011 05:37:15.768775 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe727a964bdfa867705d5eb9aef73f5e65486805a0b20502c009d048a61f9578"} err="failed to get container status \"fe727a964bdfa867705d5eb9aef73f5e65486805a0b20502c009d048a61f9578\": rpc error: code = NotFound desc = could not find container \"fe727a964bdfa867705d5eb9aef73f5e65486805a0b20502c009d048a61f9578\": container with ID starting with fe727a964bdfa867705d5eb9aef73f5e65486805a0b20502c009d048a61f9578 not found: ID does not exist" Oct 11 05:37:15 crc kubenswrapper[4651]: I1011 05:37:15.768812 4651 scope.go:117] "RemoveContainer" 
containerID="a635ffd469e772c270979f38d96c66c7c5eade5afda1a16a2439106046712438" Oct 11 05:37:15 crc kubenswrapper[4651]: E1011 05:37:15.769693 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a635ffd469e772c270979f38d96c66c7c5eade5afda1a16a2439106046712438\": container with ID starting with a635ffd469e772c270979f38d96c66c7c5eade5afda1a16a2439106046712438 not found: ID does not exist" containerID="a635ffd469e772c270979f38d96c66c7c5eade5afda1a16a2439106046712438" Oct 11 05:37:15 crc kubenswrapper[4651]: I1011 05:37:15.769797 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a635ffd469e772c270979f38d96c66c7c5eade5afda1a16a2439106046712438"} err="failed to get container status \"a635ffd469e772c270979f38d96c66c7c5eade5afda1a16a2439106046712438\": rpc error: code = NotFound desc = could not find container \"a635ffd469e772c270979f38d96c66c7c5eade5afda1a16a2439106046712438\": container with ID starting with a635ffd469e772c270979f38d96c66c7c5eade5afda1a16a2439106046712438 not found: ID does not exist" Oct 11 05:37:15 crc kubenswrapper[4651]: I1011 05:37:15.882176 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3f42662-a802-49b3-bdb2-ad9746e0f0e7" path="/var/lib/kubelet/pods/a3f42662-a802-49b3-bdb2-ad9746e0f0e7/volumes" Oct 11 05:37:16 crc kubenswrapper[4651]: I1011 05:37:16.310023 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 05:37:16 crc kubenswrapper[4651]: I1011 05:37:16.310112 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 05:37:16 crc kubenswrapper[4651]: I1011 05:37:16.310231 4651 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" Oct 11 05:37:16 crc kubenswrapper[4651]: I1011 05:37:16.311342 4651 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"628909be9c56f029477d2c09fa758e8bc50148135b9cfb5a07c99c0471644ba6"} pod="openshift-machine-config-operator/machine-config-daemon-78jnv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 11 05:37:16 crc kubenswrapper[4651]: I1011 05:37:16.311477 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" containerID="cri-o://628909be9c56f029477d2c09fa758e8bc50148135b9cfb5a07c99c0471644ba6" gracePeriod=600 Oct 11 05:37:16 crc kubenswrapper[4651]: I1011 05:37:16.475410 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-5zv8f" podUID="b5355026-b3ac-48d2-b93c-26319d272b62" containerName="registry-server" probeResult="failure" output=< Oct 11 05:37:16 crc kubenswrapper[4651]: timeout: failed to connect service ":50051" within 1s Oct 11 
05:37:16 crc kubenswrapper[4651]: > Oct 11 05:37:16 crc kubenswrapper[4651]: I1011 05:37:16.660700 4651 generic.go:334] "Generic (PLEG): container finished" podID="519a1ae1-e964-48b0-8b61-835146df28c1" containerID="628909be9c56f029477d2c09fa758e8bc50148135b9cfb5a07c99c0471644ba6" exitCode=0 Oct 11 05:37:16 crc kubenswrapper[4651]: I1011 05:37:16.660786 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" event={"ID":"519a1ae1-e964-48b0-8b61-835146df28c1","Type":"ContainerDied","Data":"628909be9c56f029477d2c09fa758e8bc50148135b9cfb5a07c99c0471644ba6"} Oct 11 05:37:16 crc kubenswrapper[4651]: I1011 05:37:16.660841 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" event={"ID":"519a1ae1-e964-48b0-8b61-835146df28c1","Type":"ContainerStarted","Data":"4f21ee74124734e4ae45f5cd30d3ff201ade9b3ffdf1481c78ccc8b374007081"} Oct 11 05:37:16 crc kubenswrapper[4651]: I1011 05:37:16.660860 4651 scope.go:117] "RemoveContainer" containerID="697c27631e7481f73fa15a7b6752c864f41b48d365447d86c85453a3dee57ea6" Oct 11 05:37:25 crc kubenswrapper[4651]: I1011 05:37:25.493973 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5zv8f" Oct 11 05:37:25 crc kubenswrapper[4651]: I1011 05:37:25.578535 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5zv8f" Oct 11 05:37:26 crc kubenswrapper[4651]: I1011 05:37:26.735006 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5zv8f"] Oct 11 05:37:26 crc kubenswrapper[4651]: I1011 05:37:26.770743 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5zv8f" podUID="b5355026-b3ac-48d2-b93c-26319d272b62" containerName="registry-server" containerID="cri-o://1681f2968f2fee4861cb754136d71333f73451a9b5c5d1ed9de3409c191ac68e" gracePeriod=2 Oct 11 05:37:27 crc kubenswrapper[4651]: I1011 05:37:27.315627 4651 util.go:48] "No ready sandbox for pod can be found. 
Oct 11 05:37:27 crc kubenswrapper[4651]: I1011 05:37:27.412766    4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5355026-b3ac-48d2-b93c-26319d272b62-utilities\") pod \"b5355026-b3ac-48d2-b93c-26319d272b62\" (UID: \"b5355026-b3ac-48d2-b93c-26319d272b62\") "
Oct 11 05:37:27 crc kubenswrapper[4651]: I1011 05:37:27.412879    4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vzxcz\" (UniqueName: \"kubernetes.io/projected/b5355026-b3ac-48d2-b93c-26319d272b62-kube-api-access-vzxcz\") pod \"b5355026-b3ac-48d2-b93c-26319d272b62\" (UID: \"b5355026-b3ac-48d2-b93c-26319d272b62\") "
Oct 11 05:37:27 crc kubenswrapper[4651]: I1011 05:37:27.412995    4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5355026-b3ac-48d2-b93c-26319d272b62-catalog-content\") pod \"b5355026-b3ac-48d2-b93c-26319d272b62\" (UID: \"b5355026-b3ac-48d2-b93c-26319d272b62\") "
Oct 11 05:37:27 crc kubenswrapper[4651]: I1011 05:37:27.414160    4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5355026-b3ac-48d2-b93c-26319d272b62-utilities" (OuterVolumeSpecName: "utilities") pod "b5355026-b3ac-48d2-b93c-26319d272b62" (UID: "b5355026-b3ac-48d2-b93c-26319d272b62"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 05:37:27 crc kubenswrapper[4651]: I1011 05:37:27.425229    4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5355026-b3ac-48d2-b93c-26319d272b62-kube-api-access-vzxcz" (OuterVolumeSpecName: "kube-api-access-vzxcz") pod "b5355026-b3ac-48d2-b93c-26319d272b62" (UID: "b5355026-b3ac-48d2-b93c-26319d272b62"). InnerVolumeSpecName "kube-api-access-vzxcz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:37:27 crc kubenswrapper[4651]: I1011 05:37:27.515698    4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5355026-b3ac-48d2-b93c-26319d272b62-utilities\") on node \"crc\" DevicePath \"\""
Oct 11 05:37:27 crc kubenswrapper[4651]: I1011 05:37:27.515742    4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vzxcz\" (UniqueName: \"kubernetes.io/projected/b5355026-b3ac-48d2-b93c-26319d272b62-kube-api-access-vzxcz\") on node \"crc\" DevicePath \"\""
Oct 11 05:37:27 crc kubenswrapper[4651]: I1011 05:37:27.517940    4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5355026-b3ac-48d2-b93c-26319d272b62-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b5355026-b3ac-48d2-b93c-26319d272b62" (UID: "b5355026-b3ac-48d2-b93c-26319d272b62"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 05:37:27 crc kubenswrapper[4651]: I1011 05:37:27.616757    4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5355026-b3ac-48d2-b93c-26319d272b62-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 11 05:37:27 crc kubenswrapper[4651]: I1011 05:37:27.785654    4651 generic.go:334] "Generic (PLEG): container finished" podID="b5355026-b3ac-48d2-b93c-26319d272b62" containerID="1681f2968f2fee4861cb754136d71333f73451a9b5c5d1ed9de3409c191ac68e" exitCode=0
Oct 11 05:37:27 crc kubenswrapper[4651]: I1011 05:37:27.785721    4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5zv8f" event={"ID":"b5355026-b3ac-48d2-b93c-26319d272b62","Type":"ContainerDied","Data":"1681f2968f2fee4861cb754136d71333f73451a9b5c5d1ed9de3409c191ac68e"}
Oct 11 05:37:27 crc kubenswrapper[4651]: I1011 05:37:27.785807    4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5zv8f" event={"ID":"b5355026-b3ac-48d2-b93c-26319d272b62","Type":"ContainerDied","Data":"8acdde45db2d65bd15f489ff1c9f283646d5f30def7e123b65fcfaf2abbdd21d"}
Oct 11 05:37:27 crc kubenswrapper[4651]: I1011 05:37:27.785876    4651 scope.go:117] "RemoveContainer" containerID="1681f2968f2fee4861cb754136d71333f73451a9b5c5d1ed9de3409c191ac68e"
Oct 11 05:37:27 crc kubenswrapper[4651]: I1011 05:37:27.786740    4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5zv8f"
Oct 11 05:37:27 crc kubenswrapper[4651]: I1011 05:37:27.809155    4651 scope.go:117] "RemoveContainer" containerID="db904ed300dc9216252e2e7034d6db69fa9c859f2148b44c716a5aea28c60a45"
Oct 11 05:37:27 crc kubenswrapper[4651]: I1011 05:37:27.841376    4651 scope.go:117] "RemoveContainer" containerID="377d0efbe249de94faaac1925969ab48468c15dfb2a9e7da027f4971c334fcf7"
Oct 11 05:37:27 crc kubenswrapper[4651]: I1011 05:37:27.854857    4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5zv8f"]
Oct 11 05:37:27 crc kubenswrapper[4651]: I1011 05:37:27.908242    4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5zv8f"]
Oct 11 05:37:27 crc kubenswrapper[4651]: I1011 05:37:27.912709    4651 scope.go:117] "RemoveContainer" containerID="1681f2968f2fee4861cb754136d71333f73451a9b5c5d1ed9de3409c191ac68e"
Oct 11 05:37:27 crc kubenswrapper[4651]: E1011 05:37:27.914261    4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1681f2968f2fee4861cb754136d71333f73451a9b5c5d1ed9de3409c191ac68e\": container with ID starting with 1681f2968f2fee4861cb754136d71333f73451a9b5c5d1ed9de3409c191ac68e not found: ID does not exist" containerID="1681f2968f2fee4861cb754136d71333f73451a9b5c5d1ed9de3409c191ac68e"
Oct 11 05:37:27 crc kubenswrapper[4651]: I1011 05:37:27.914302    4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1681f2968f2fee4861cb754136d71333f73451a9b5c5d1ed9de3409c191ac68e"} err="failed to get container status \"1681f2968f2fee4861cb754136d71333f73451a9b5c5d1ed9de3409c191ac68e\": rpc error: code = NotFound desc = could not find container \"1681f2968f2fee4861cb754136d71333f73451a9b5c5d1ed9de3409c191ac68e\": container with ID starting with 1681f2968f2fee4861cb754136d71333f73451a9b5c5d1ed9de3409c191ac68e not found: ID does not exist"
Oct 11 05:37:27 crc kubenswrapper[4651]: I1011 05:37:27.914336    4651 scope.go:117] "RemoveContainer" containerID="db904ed300dc9216252e2e7034d6db69fa9c859f2148b44c716a5aea28c60a45"
Oct 11 05:37:27 crc kubenswrapper[4651]: E1011 05:37:27.914694    4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db904ed300dc9216252e2e7034d6db69fa9c859f2148b44c716a5aea28c60a45\": container with ID starting with db904ed300dc9216252e2e7034d6db69fa9c859f2148b44c716a5aea28c60a45 not found: ID does not exist" containerID="db904ed300dc9216252e2e7034d6db69fa9c859f2148b44c716a5aea28c60a45"
Oct 11 05:37:27 crc kubenswrapper[4651]: I1011 05:37:27.914760    4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db904ed300dc9216252e2e7034d6db69fa9c859f2148b44c716a5aea28c60a45"} err="failed to get container status \"db904ed300dc9216252e2e7034d6db69fa9c859f2148b44c716a5aea28c60a45\": rpc error: code = NotFound desc = could not find container \"db904ed300dc9216252e2e7034d6db69fa9c859f2148b44c716a5aea28c60a45\": container with ID starting with db904ed300dc9216252e2e7034d6db69fa9c859f2148b44c716a5aea28c60a45 not found: ID does not exist"
Oct 11 05:37:27 crc kubenswrapper[4651]: I1011 05:37:27.914865    4651 scope.go:117] "RemoveContainer" containerID="377d0efbe249de94faaac1925969ab48468c15dfb2a9e7da027f4971c334fcf7"
Oct 11 05:37:27 crc kubenswrapper[4651]: E1011 05:37:27.915282    4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"377d0efbe249de94faaac1925969ab48468c15dfb2a9e7da027f4971c334fcf7\": container with ID starting with 377d0efbe249de94faaac1925969ab48468c15dfb2a9e7da027f4971c334fcf7 not found: ID does not exist" containerID="377d0efbe249de94faaac1925969ab48468c15dfb2a9e7da027f4971c334fcf7"
Oct 11 05:37:27 crc kubenswrapper[4651]: I1011 05:37:27.915329    4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"377d0efbe249de94faaac1925969ab48468c15dfb2a9e7da027f4971c334fcf7"} err="failed to get container status \"377d0efbe249de94faaac1925969ab48468c15dfb2a9e7da027f4971c334fcf7\": rpc error: code = NotFound desc = could not find container \"377d0efbe249de94faaac1925969ab48468c15dfb2a9e7da027f4971c334fcf7\": container with ID starting with 377d0efbe249de94faaac1925969ab48468c15dfb2a9e7da027f4971c334fcf7 not found: ID does not exist"
Oct 11 05:37:29 crc kubenswrapper[4651]: I1011 05:37:29.890664    4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5355026-b3ac-48d2-b93c-26319d272b62" path="/var/lib/kubelet/pods/b5355026-b3ac-48d2-b93c-26319d272b62/volumes"
Oct 11 05:37:33 crc kubenswrapper[4651]: E1011 05:37:33.915100    4651 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.182:33976->38.102.83.182:34445: write tcp 38.102.83.182:33976->38.102.83.182:34445: write: broken pipe
Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.310571    4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"]
Oct 11 05:37:54 crc kubenswrapper[4651]: E1011 05:37:54.311948    4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3f42662-a802-49b3-bdb2-ad9746e0f0e7" containerName="extract-content"
Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.311970    4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3f42662-a802-49b3-bdb2-ad9746e0f0e7" containerName="extract-content"
Oct 11 05:37:54 crc kubenswrapper[4651]: E1011 05:37:54.312000    4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5355026-b3ac-48d2-b93c-26319d272b62" containerName="extract-content"
Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.312012    4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5355026-b3ac-48d2-b93c-26319d272b62" containerName="extract-content"
Oct 11 05:37:54 crc kubenswrapper[4651]: E1011 05:37:54.312049    4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5355026-b3ac-48d2-b93c-26319d272b62" containerName="registry-server"
Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.312060    4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5355026-b3ac-48d2-b93c-26319d272b62" containerName="registry-server"
Oct 11 05:37:54 crc kubenswrapper[4651]: E1011 05:37:54.312083    4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3f42662-a802-49b3-bdb2-ad9746e0f0e7" containerName="registry-server"
Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.312096    4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3f42662-a802-49b3-bdb2-ad9746e0f0e7" containerName="registry-server"
Oct 11 05:37:54 crc kubenswrapper[4651]: E1011 05:37:54.312131    4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3f42662-a802-49b3-bdb2-ad9746e0f0e7" containerName="extract-utilities"
Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.312143    4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3f42662-a802-49b3-bdb2-ad9746e0f0e7" containerName="extract-utilities"
Oct 11 05:37:54 crc kubenswrapper[4651]: E1011 05:37:54.312172    4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5355026-b3ac-48d2-b93c-26319d272b62" containerName="extract-utilities"
Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.312184    4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5355026-b3ac-48d2-b93c-26319d272b62" containerName="extract-utilities"
Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.312551    4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5355026-b3ac-48d2-b93c-26319d272b62" containerName="registry-server"
Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.312579    4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3f42662-a802-49b3-bdb2-ad9746e0f0e7" containerName="registry-server"
Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.314439    4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.329097    4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"]
Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.340433    4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key"
Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.340454    4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-r2cjc"
Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.340455    4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0"
Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.342706    4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0"
Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.469267    4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/76b8d472-5f4e-4d97-be15-0f5be51acd85-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " pod="openstack/tempest-tests-tempest"
Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.469350    4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rrx2f\" (UniqueName: \"kubernetes.io/projected/76b8d472-5f4e-4d97-be15-0f5be51acd85-kube-api-access-rrx2f\") pod \"tempest-tests-tempest\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " pod="openstack/tempest-tests-tempest"
Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.469465    4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/76b8d472-5f4e-4d97-be15-0f5be51acd85-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " pod="openstack/tempest-tests-tempest"
Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.469578    4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"tempest-tests-tempest\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " pod="openstack/tempest-tests-tempest"
Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.469671    4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/76b8d472-5f4e-4d97-be15-0f5be51acd85-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " pod="openstack/tempest-tests-tempest"
Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.469726    4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/76b8d472-5f4e-4d97-be15-0f5be51acd85-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " pod="openstack/tempest-tests-tempest"
Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.469768    4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/76b8d472-5f4e-4d97-be15-0f5be51acd85-config-data\") pod \"tempest-tests-tempest\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " pod="openstack/tempest-tests-tempest"
Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.469807    4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/76b8d472-5f4e-4d97-be15-0f5be51acd85-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " pod="openstack/tempest-tests-tempest"
Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.470076    4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/76b8d472-5f4e-4d97-be15-0f5be51acd85-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " pod="openstack/tempest-tests-tempest"
Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.572771    4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/76b8d472-5f4e-4d97-be15-0f5be51acd85-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " pod="openstack/tempest-tests-tempest"
Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.572875    4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rrx2f\" (UniqueName: \"kubernetes.io/projected/76b8d472-5f4e-4d97-be15-0f5be51acd85-kube-api-access-rrx2f\") pod \"tempest-tests-tempest\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " pod="openstack/tempest-tests-tempest"
Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.572983    4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/76b8d472-5f4e-4d97-be15-0f5be51acd85-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " pod="openstack/tempest-tests-tempest"
Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.573091    4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"tempest-tests-tempest\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " pod="openstack/tempest-tests-tempest"
Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.573187    4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/76b8d472-5f4e-4d97-be15-0f5be51acd85-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " pod="openstack/tempest-tests-tempest"
Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.573261    4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/76b8d472-5f4e-4d97-be15-0f5be51acd85-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " pod="openstack/tempest-tests-tempest"
Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.573319    4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/76b8d472-5f4e-4d97-be15-0f5be51acd85-config-data\") pod \"tempest-tests-tempest\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " pod="openstack/tempest-tests-tempest"
\"tempest-tests-tempest\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " pod="openstack/tempest-tests-tempest" Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.573371 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/76b8d472-5f4e-4d97-be15-0f5be51acd85-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " pod="openstack/tempest-tests-tempest" Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.573420 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/76b8d472-5f4e-4d97-be15-0f5be51acd85-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " pod="openstack/tempest-tests-tempest" Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.573689 4651 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"tempest-tests-tempest\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/tempest-tests-tempest" Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.574166 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/76b8d472-5f4e-4d97-be15-0f5be51acd85-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " pod="openstack/tempest-tests-tempest" Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.574340 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/76b8d472-5f4e-4d97-be15-0f5be51acd85-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " pod="openstack/tempest-tests-tempest" Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.575150 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/76b8d472-5f4e-4d97-be15-0f5be51acd85-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " pod="openstack/tempest-tests-tempest" Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.576108 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/76b8d472-5f4e-4d97-be15-0f5be51acd85-config-data\") pod \"tempest-tests-tempest\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " pod="openstack/tempest-tests-tempest" Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.580711 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/76b8d472-5f4e-4d97-be15-0f5be51acd85-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " pod="openstack/tempest-tests-tempest" Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.581585 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/76b8d472-5f4e-4d97-be15-0f5be51acd85-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " pod="openstack/tempest-tests-tempest" Oct 11 
05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.586510 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/76b8d472-5f4e-4d97-be15-0f5be51acd85-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " pod="openstack/tempest-tests-tempest" Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.606380 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rrx2f\" (UniqueName: \"kubernetes.io/projected/76b8d472-5f4e-4d97-be15-0f5be51acd85-kube-api-access-rrx2f\") pod \"tempest-tests-tempest\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " pod="openstack/tempest-tests-tempest" Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.633969 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"tempest-tests-tempest\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " pod="openstack/tempest-tests-tempest" Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.673520 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 11 05:37:54 crc kubenswrapper[4651]: I1011 05:37:54.988098 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Oct 11 05:37:55 crc kubenswrapper[4651]: I1011 05:37:55.109792 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"76b8d472-5f4e-4d97-be15-0f5be51acd85","Type":"ContainerStarted","Data":"3b3887f270fc0f921fadfcae1d12dacbcede15296f2f17225a364dd07503b6c6"} Oct 11 05:38:23 crc kubenswrapper[4651]: E1011 05:38:23.247250 4651 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Oct 11 05:38:23 crc kubenswrapper[4651]: E1011 05:38:23.248262 4651 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rrx2f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(76b8d472-5f4e-4d97-be15-0f5be51acd85): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 11 05:38:23 crc kubenswrapper[4651]: E1011 05:38:23.251028 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" 
podUID="76b8d472-5f4e-4d97-be15-0f5be51acd85" Oct 11 05:38:23 crc kubenswrapper[4651]: E1011 05:38:23.429159 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="76b8d472-5f4e-4d97-be15-0f5be51acd85" Oct 11 05:38:36 crc kubenswrapper[4651]: I1011 05:38:36.322699 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Oct 11 05:38:38 crc kubenswrapper[4651]: I1011 05:38:38.648179 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"76b8d472-5f4e-4d97-be15-0f5be51acd85","Type":"ContainerStarted","Data":"9892b94c05e15011ecc833b92d8df53c89af4075a96bcb6f9de3b6ff25cabe82"} Oct 11 05:38:38 crc kubenswrapper[4651]: I1011 05:38:38.680969 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=4.35939183 podStartE2EDuration="45.680934923s" podCreationTimestamp="2025-10-11 05:37:53 +0000 UTC" firstStartedPulling="2025-10-11 05:37:54.99801146 +0000 UTC m=+2795.894244276" lastFinishedPulling="2025-10-11 05:38:36.319554543 +0000 UTC m=+2837.215787369" observedRunningTime="2025-10-11 05:38:38.673671367 +0000 UTC m=+2839.569904163" watchObservedRunningTime="2025-10-11 05:38:38.680934923 +0000 UTC m=+2839.577167759" Oct 11 05:39:16 crc kubenswrapper[4651]: I1011 05:39:16.310669 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 05:39:16 crc kubenswrapper[4651]: I1011 05:39:16.311456 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 05:39:46 crc kubenswrapper[4651]: I1011 05:39:46.310202 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 05:39:46 crc kubenswrapper[4651]: I1011 05:39:46.310916 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 05:40:03 crc kubenswrapper[4651]: I1011 05:40:03.908092 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8q9dc"] Oct 11 05:40:03 crc kubenswrapper[4651]: I1011 05:40:03.914966 4651 util.go:30] "No sandbox for pod can be found. 
Oct 11 05:40:03 crc kubenswrapper[4651]: I1011 05:40:03.916513    4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8q9dc"]
Oct 11 05:40:03 crc kubenswrapper[4651]: I1011 05:40:03.968090    4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3644e06e-bc6e-4b90-935f-15c486493277-utilities\") pod \"certified-operators-8q9dc\" (UID: \"3644e06e-bc6e-4b90-935f-15c486493277\") " pod="openshift-marketplace/certified-operators-8q9dc"
Oct 11 05:40:03 crc kubenswrapper[4651]: I1011 05:40:03.968137    4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcjwp\" (UniqueName: \"kubernetes.io/projected/3644e06e-bc6e-4b90-935f-15c486493277-kube-api-access-qcjwp\") pod \"certified-operators-8q9dc\" (UID: \"3644e06e-bc6e-4b90-935f-15c486493277\") " pod="openshift-marketplace/certified-operators-8q9dc"
Oct 11 05:40:03 crc kubenswrapper[4651]: I1011 05:40:03.968177    4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3644e06e-bc6e-4b90-935f-15c486493277-catalog-content\") pod \"certified-operators-8q9dc\" (UID: \"3644e06e-bc6e-4b90-935f-15c486493277\") " pod="openshift-marketplace/certified-operators-8q9dc"
Oct 11 05:40:04 crc kubenswrapper[4651]: I1011 05:40:04.069984    4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3644e06e-bc6e-4b90-935f-15c486493277-utilities\") pod \"certified-operators-8q9dc\" (UID: \"3644e06e-bc6e-4b90-935f-15c486493277\") " pod="openshift-marketplace/certified-operators-8q9dc"
Oct 11 05:40:04 crc kubenswrapper[4651]: I1011 05:40:04.070032    4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcjwp\" (UniqueName: \"kubernetes.io/projected/3644e06e-bc6e-4b90-935f-15c486493277-kube-api-access-qcjwp\") pod \"certified-operators-8q9dc\" (UID: \"3644e06e-bc6e-4b90-935f-15c486493277\") " pod="openshift-marketplace/certified-operators-8q9dc"
Oct 11 05:40:04 crc kubenswrapper[4651]: I1011 05:40:04.070063    4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3644e06e-bc6e-4b90-935f-15c486493277-catalog-content\") pod \"certified-operators-8q9dc\" (UID: \"3644e06e-bc6e-4b90-935f-15c486493277\") " pod="openshift-marketplace/certified-operators-8q9dc"
Oct 11 05:40:04 crc kubenswrapper[4651]: I1011 05:40:04.070645    4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3644e06e-bc6e-4b90-935f-15c486493277-utilities\") pod \"certified-operators-8q9dc\" (UID: \"3644e06e-bc6e-4b90-935f-15c486493277\") " pod="openshift-marketplace/certified-operators-8q9dc"
Oct 11 05:40:04 crc kubenswrapper[4651]: I1011 05:40:04.070788    4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3644e06e-bc6e-4b90-935f-15c486493277-catalog-content\") pod \"certified-operators-8q9dc\" (UID: \"3644e06e-bc6e-4b90-935f-15c486493277\") " pod="openshift-marketplace/certified-operators-8q9dc"
Oct 11 05:40:04 crc kubenswrapper[4651]: I1011 05:40:04.099996    4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qcjwp\" (UniqueName: \"kubernetes.io/projected/3644e06e-bc6e-4b90-935f-15c486493277-kube-api-access-qcjwp\") pod \"certified-operators-8q9dc\" (UID: \"3644e06e-bc6e-4b90-935f-15c486493277\") " pod="openshift-marketplace/certified-operators-8q9dc"
Oct 11 05:40:04 crc kubenswrapper[4651]: I1011 05:40:04.262090    4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8q9dc"
Oct 11 05:40:04 crc kubenswrapper[4651]: I1011 05:40:04.786990    4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8q9dc"]
Oct 11 05:40:05 crc kubenswrapper[4651]: I1011 05:40:05.799113    4651 generic.go:334] "Generic (PLEG): container finished" podID="3644e06e-bc6e-4b90-935f-15c486493277" containerID="f2b708c575bb07945eac8cd53edca0bae5153330db6b148cc8274e616d324470" exitCode=0
Oct 11 05:40:05 crc kubenswrapper[4651]: I1011 05:40:05.799215    4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8q9dc" event={"ID":"3644e06e-bc6e-4b90-935f-15c486493277","Type":"ContainerDied","Data":"f2b708c575bb07945eac8cd53edca0bae5153330db6b148cc8274e616d324470"}
Oct 11 05:40:05 crc kubenswrapper[4651]: I1011 05:40:05.799476    4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8q9dc" event={"ID":"3644e06e-bc6e-4b90-935f-15c486493277","Type":"ContainerStarted","Data":"2757b7e5de823dd1fe5fc2041cc8381ff5e331b72d3d36f3a7b8716b3d24cea3"}
Oct 11 05:40:05 crc kubenswrapper[4651]: I1011 05:40:05.802477    4651 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 11 05:40:06 crc kubenswrapper[4651]: I1011 05:40:06.810039    4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8q9dc" event={"ID":"3644e06e-bc6e-4b90-935f-15c486493277","Type":"ContainerStarted","Data":"6362b154c95283c682c4b4cb3e95705a4aa477c4d8143c3d2f0381b4e1600b15"}
Oct 11 05:40:07 crc kubenswrapper[4651]: I1011 05:40:07.824546    4651 generic.go:334] "Generic (PLEG): container finished" podID="3644e06e-bc6e-4b90-935f-15c486493277" containerID="6362b154c95283c682c4b4cb3e95705a4aa477c4d8143c3d2f0381b4e1600b15" exitCode=0
Oct 11 05:40:07 crc kubenswrapper[4651]: I1011 05:40:07.824596    4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8q9dc" event={"ID":"3644e06e-bc6e-4b90-935f-15c486493277","Type":"ContainerDied","Data":"6362b154c95283c682c4b4cb3e95705a4aa477c4d8143c3d2f0381b4e1600b15"}
Oct 11 05:40:08 crc kubenswrapper[4651]: I1011 05:40:08.845266    4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8q9dc" event={"ID":"3644e06e-bc6e-4b90-935f-15c486493277","Type":"ContainerStarted","Data":"181aa3499972d06c37501464c7aa01bd107d14914e622cd7d0397390abcd61d3"}
Oct 11 05:40:08 crc kubenswrapper[4651]: I1011 05:40:08.873490    4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8q9dc" podStartSLOduration=3.343513796 podStartE2EDuration="5.873466858s" podCreationTimestamp="2025-10-11 05:40:03 +0000 UTC" firstStartedPulling="2025-10-11 05:40:05.802219938 +0000 UTC m=+2926.698452734" lastFinishedPulling="2025-10-11 05:40:08.33217297 +0000 UTC m=+2929.228405796" observedRunningTime="2025-10-11 05:40:08.871401285 +0000 UTC m=+2929.767634101" watchObservedRunningTime="2025-10-11 05:40:08.873466858 +0000 UTC m=+2929.769699684"
Oct 11 05:40:14 crc kubenswrapper[4651]: I1011 05:40:14.262624    4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8q9dc"
Oct 11 05:40:14 crc kubenswrapper[4651]: I1011 05:40:14.263461    4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-8q9dc"
Oct 11 05:40:14 crc kubenswrapper[4651]: I1011 05:40:14.308900    4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8q9dc"
Oct 11 05:40:14 crc kubenswrapper[4651]: I1011 05:40:14.988972    4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-8q9dc"
Oct 11 05:40:15 crc kubenswrapper[4651]: I1011 05:40:15.060280    4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8q9dc"]
Oct 11 05:40:16 crc kubenswrapper[4651]: I1011 05:40:16.311307    4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 11 05:40:16 crc kubenswrapper[4651]: I1011 05:40:16.312388    4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 11 05:40:16 crc kubenswrapper[4651]: I1011 05:40:16.312493    4651 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-78jnv"
Oct 11 05:40:16 crc kubenswrapper[4651]: I1011 05:40:16.314121    4651 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4f21ee74124734e4ae45f5cd30d3ff201ade9b3ffdf1481c78ccc8b374007081"} pod="openshift-machine-config-operator/machine-config-daemon-78jnv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 11 05:40:16 crc kubenswrapper[4651]: I1011 05:40:16.314215    4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" containerID="cri-o://4f21ee74124734e4ae45f5cd30d3ff201ade9b3ffdf1481c78ccc8b374007081" gracePeriod=600
Oct 11 05:40:16 crc kubenswrapper[4651]: E1011 05:40:16.447333    4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1"
Oct 11 05:40:16 crc kubenswrapper[4651]: I1011 05:40:16.932452    4651 generic.go:334] "Generic (PLEG): container finished" podID="519a1ae1-e964-48b0-8b61-835146df28c1" containerID="4f21ee74124734e4ae45f5cd30d3ff201ade9b3ffdf1481c78ccc8b374007081" exitCode=0
Oct 11 05:40:16 crc kubenswrapper[4651]: I1011 05:40:16.932537    4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" event={"ID":"519a1ae1-e964-48b0-8b61-835146df28c1","Type":"ContainerDied","Data":"4f21ee74124734e4ae45f5cd30d3ff201ade9b3ffdf1481c78ccc8b374007081"}
Oct 11 05:40:16 crc kubenswrapper[4651]: I1011 05:40:16.932627    4651 scope.go:117] "RemoveContainer" containerID="628909be9c56f029477d2c09fa758e8bc50148135b9cfb5a07c99c0471644ba6"
Oct 11 05:40:16 crc kubenswrapper[4651]: I1011 05:40:16.932875    4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-8q9dc" podUID="3644e06e-bc6e-4b90-935f-15c486493277" containerName="registry-server" containerID="cri-o://181aa3499972d06c37501464c7aa01bd107d14914e622cd7d0397390abcd61d3" gracePeriod=2
Oct 11 05:40:16 crc kubenswrapper[4651]: I1011 05:40:16.934348    4651 scope.go:117] "RemoveContainer" containerID="4f21ee74124734e4ae45f5cd30d3ff201ade9b3ffdf1481c78ccc8b374007081"
Oct 11 05:40:16 crc kubenswrapper[4651]: E1011 05:40:16.935030    4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1"
Oct 11 05:40:17 crc kubenswrapper[4651]: I1011 05:40:17.470340    4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8q9dc"
Oct 11 05:40:17 crc kubenswrapper[4651]: I1011 05:40:17.533689    4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qcjwp\" (UniqueName: \"kubernetes.io/projected/3644e06e-bc6e-4b90-935f-15c486493277-kube-api-access-qcjwp\") pod \"3644e06e-bc6e-4b90-935f-15c486493277\" (UID: \"3644e06e-bc6e-4b90-935f-15c486493277\") "
Oct 11 05:40:17 crc kubenswrapper[4651]: I1011 05:40:17.533755    4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3644e06e-bc6e-4b90-935f-15c486493277-catalog-content\") pod \"3644e06e-bc6e-4b90-935f-15c486493277\" (UID: \"3644e06e-bc6e-4b90-935f-15c486493277\") "
Oct 11 05:40:17 crc kubenswrapper[4651]: I1011 05:40:17.533785    4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3644e06e-bc6e-4b90-935f-15c486493277-utilities\") pod \"3644e06e-bc6e-4b90-935f-15c486493277\" (UID: \"3644e06e-bc6e-4b90-935f-15c486493277\") "
Oct 11 05:40:17 crc kubenswrapper[4651]: I1011 05:40:17.535332    4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3644e06e-bc6e-4b90-935f-15c486493277-utilities" (OuterVolumeSpecName: "utilities") pod "3644e06e-bc6e-4b90-935f-15c486493277" (UID: "3644e06e-bc6e-4b90-935f-15c486493277"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:40:17 crc kubenswrapper[4651]: I1011 05:40:17.547143 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3644e06e-bc6e-4b90-935f-15c486493277-kube-api-access-qcjwp" (OuterVolumeSpecName: "kube-api-access-qcjwp") pod "3644e06e-bc6e-4b90-935f-15c486493277" (UID: "3644e06e-bc6e-4b90-935f-15c486493277"). InnerVolumeSpecName "kube-api-access-qcjwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:40:17 crc kubenswrapper[4651]: I1011 05:40:17.616202 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3644e06e-bc6e-4b90-935f-15c486493277-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3644e06e-bc6e-4b90-935f-15c486493277" (UID: "3644e06e-bc6e-4b90-935f-15c486493277"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:40:17 crc kubenswrapper[4651]: I1011 05:40:17.637762 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qcjwp\" (UniqueName: \"kubernetes.io/projected/3644e06e-bc6e-4b90-935f-15c486493277-kube-api-access-qcjwp\") on node \"crc\" DevicePath \"\"" Oct 11 05:40:17 crc kubenswrapper[4651]: I1011 05:40:17.637914 4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3644e06e-bc6e-4b90-935f-15c486493277-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 05:40:17 crc kubenswrapper[4651]: I1011 05:40:17.637985 4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3644e06e-bc6e-4b90-935f-15c486493277-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 05:40:17 crc kubenswrapper[4651]: I1011 05:40:17.947368 4651 generic.go:334] "Generic (PLEG): container finished" podID="3644e06e-bc6e-4b90-935f-15c486493277" containerID="181aa3499972d06c37501464c7aa01bd107d14914e622cd7d0397390abcd61d3" exitCode=0 Oct 11 05:40:17 crc kubenswrapper[4651]: I1011 05:40:17.947415 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8q9dc" event={"ID":"3644e06e-bc6e-4b90-935f-15c486493277","Type":"ContainerDied","Data":"181aa3499972d06c37501464c7aa01bd107d14914e622cd7d0397390abcd61d3"} Oct 11 05:40:17 crc kubenswrapper[4651]: I1011 05:40:17.947477 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8q9dc" Oct 11 05:40:17 crc kubenswrapper[4651]: I1011 05:40:17.947499 4651 scope.go:117] "RemoveContainer" containerID="181aa3499972d06c37501464c7aa01bd107d14914e622cd7d0397390abcd61d3" Oct 11 05:40:17 crc kubenswrapper[4651]: I1011 05:40:17.947482 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8q9dc" event={"ID":"3644e06e-bc6e-4b90-935f-15c486493277","Type":"ContainerDied","Data":"2757b7e5de823dd1fe5fc2041cc8381ff5e331b72d3d36f3a7b8716b3d24cea3"} Oct 11 05:40:17 crc kubenswrapper[4651]: I1011 05:40:17.981661 4651 scope.go:117] "RemoveContainer" containerID="6362b154c95283c682c4b4cb3e95705a4aa477c4d8143c3d2f0381b4e1600b15" Oct 11 05:40:17 crc kubenswrapper[4651]: I1011 05:40:17.983120 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8q9dc"] Oct 11 05:40:17 crc kubenswrapper[4651]: I1011 05:40:17.993163 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-8q9dc"] Oct 11 05:40:18 crc kubenswrapper[4651]: I1011 05:40:18.015008 4651 scope.go:117] "RemoveContainer" containerID="f2b708c575bb07945eac8cd53edca0bae5153330db6b148cc8274e616d324470" Oct 11 05:40:18 crc kubenswrapper[4651]: I1011 05:40:18.081880 4651 scope.go:117] "RemoveContainer" containerID="181aa3499972d06c37501464c7aa01bd107d14914e622cd7d0397390abcd61d3" Oct 11 05:40:18 crc kubenswrapper[4651]: E1011 05:40:18.082455 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"181aa3499972d06c37501464c7aa01bd107d14914e622cd7d0397390abcd61d3\": container with ID starting with 181aa3499972d06c37501464c7aa01bd107d14914e622cd7d0397390abcd61d3 not found: ID does not exist" containerID="181aa3499972d06c37501464c7aa01bd107d14914e622cd7d0397390abcd61d3" Oct 11 05:40:18 crc kubenswrapper[4651]: I1011 05:40:18.082553 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"181aa3499972d06c37501464c7aa01bd107d14914e622cd7d0397390abcd61d3"} err="failed to get container status \"181aa3499972d06c37501464c7aa01bd107d14914e622cd7d0397390abcd61d3\": rpc error: code = NotFound desc = could not find container \"181aa3499972d06c37501464c7aa01bd107d14914e622cd7d0397390abcd61d3\": container with ID starting with 181aa3499972d06c37501464c7aa01bd107d14914e622cd7d0397390abcd61d3 not found: ID does not exist" Oct 11 05:40:18 crc kubenswrapper[4651]: I1011 05:40:18.082637 4651 scope.go:117] "RemoveContainer" containerID="6362b154c95283c682c4b4cb3e95705a4aa477c4d8143c3d2f0381b4e1600b15" Oct 11 05:40:18 crc kubenswrapper[4651]: E1011 05:40:18.083196 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6362b154c95283c682c4b4cb3e95705a4aa477c4d8143c3d2f0381b4e1600b15\": container with ID starting with 6362b154c95283c682c4b4cb3e95705a4aa477c4d8143c3d2f0381b4e1600b15 not found: ID does not exist" containerID="6362b154c95283c682c4b4cb3e95705a4aa477c4d8143c3d2f0381b4e1600b15" Oct 11 05:40:18 crc kubenswrapper[4651]: I1011 05:40:18.083256 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6362b154c95283c682c4b4cb3e95705a4aa477c4d8143c3d2f0381b4e1600b15"} err="failed to get container status \"6362b154c95283c682c4b4cb3e95705a4aa477c4d8143c3d2f0381b4e1600b15\": rpc error: code = NotFound desc = could not find 
container \"6362b154c95283c682c4b4cb3e95705a4aa477c4d8143c3d2f0381b4e1600b15\": container with ID starting with 6362b154c95283c682c4b4cb3e95705a4aa477c4d8143c3d2f0381b4e1600b15 not found: ID does not exist" Oct 11 05:40:18 crc kubenswrapper[4651]: I1011 05:40:18.083288 4651 scope.go:117] "RemoveContainer" containerID="f2b708c575bb07945eac8cd53edca0bae5153330db6b148cc8274e616d324470" Oct 11 05:40:18 crc kubenswrapper[4651]: E1011 05:40:18.083841 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2b708c575bb07945eac8cd53edca0bae5153330db6b148cc8274e616d324470\": container with ID starting with f2b708c575bb07945eac8cd53edca0bae5153330db6b148cc8274e616d324470 not found: ID does not exist" containerID="f2b708c575bb07945eac8cd53edca0bae5153330db6b148cc8274e616d324470" Oct 11 05:40:18 crc kubenswrapper[4651]: I1011 05:40:18.083901 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2b708c575bb07945eac8cd53edca0bae5153330db6b148cc8274e616d324470"} err="failed to get container status \"f2b708c575bb07945eac8cd53edca0bae5153330db6b148cc8274e616d324470\": rpc error: code = NotFound desc = could not find container \"f2b708c575bb07945eac8cd53edca0bae5153330db6b148cc8274e616d324470\": container with ID starting with f2b708c575bb07945eac8cd53edca0bae5153330db6b148cc8274e616d324470 not found: ID does not exist" Oct 11 05:40:19 crc kubenswrapper[4651]: I1011 05:40:19.947086 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3644e06e-bc6e-4b90-935f-15c486493277" path="/var/lib/kubelet/pods/3644e06e-bc6e-4b90-935f-15c486493277/volumes" Oct 11 05:40:29 crc kubenswrapper[4651]: I1011 05:40:29.882564 4651 scope.go:117] "RemoveContainer" containerID="4f21ee74124734e4ae45f5cd30d3ff201ade9b3ffdf1481c78ccc8b374007081" Oct 11 05:40:29 crc kubenswrapper[4651]: E1011 05:40:29.886385 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:40:41 crc kubenswrapper[4651]: I1011 05:40:41.869640 4651 scope.go:117] "RemoveContainer" containerID="4f21ee74124734e4ae45f5cd30d3ff201ade9b3ffdf1481c78ccc8b374007081" Oct 11 05:40:41 crc kubenswrapper[4651]: E1011 05:40:41.870754 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:40:52 crc kubenswrapper[4651]: I1011 05:40:52.870219 4651 scope.go:117] "RemoveContainer" containerID="4f21ee74124734e4ae45f5cd30d3ff201ade9b3ffdf1481c78ccc8b374007081" Oct 11 05:40:52 crc kubenswrapper[4651]: E1011 05:40:52.871382 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:41:06 crc kubenswrapper[4651]: I1011 05:41:06.870257 4651 scope.go:117] "RemoveContainer" containerID="4f21ee74124734e4ae45f5cd30d3ff201ade9b3ffdf1481c78ccc8b374007081" Oct 11 05:41:06 crc kubenswrapper[4651]: E1011 05:41:06.871553 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:41:21 crc kubenswrapper[4651]: I1011 05:41:21.869685 4651 scope.go:117] "RemoveContainer" containerID="4f21ee74124734e4ae45f5cd30d3ff201ade9b3ffdf1481c78ccc8b374007081" Oct 11 05:41:21 crc kubenswrapper[4651]: E1011 05:41:21.870419 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:41:33 crc kubenswrapper[4651]: I1011 05:41:33.872519 4651 scope.go:117] "RemoveContainer" containerID="4f21ee74124734e4ae45f5cd30d3ff201ade9b3ffdf1481c78ccc8b374007081" Oct 11 05:41:33 crc kubenswrapper[4651]: E1011 05:41:33.873743 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:41:46 crc kubenswrapper[4651]: I1011 05:41:46.870032 4651 scope.go:117] "RemoveContainer" containerID="4f21ee74124734e4ae45f5cd30d3ff201ade9b3ffdf1481c78ccc8b374007081" Oct 11 05:41:46 crc kubenswrapper[4651]: E1011 05:41:46.871157 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:42:01 crc kubenswrapper[4651]: I1011 05:42:01.896329 4651 scope.go:117] "RemoveContainer" containerID="4f21ee74124734e4ae45f5cd30d3ff201ade9b3ffdf1481c78ccc8b374007081" Oct 11 05:42:01 crc kubenswrapper[4651]: E1011 05:42:01.897278 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" 
podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:42:14 crc kubenswrapper[4651]: I1011 05:42:14.869774 4651 scope.go:117] "RemoveContainer" containerID="4f21ee74124734e4ae45f5cd30d3ff201ade9b3ffdf1481c78ccc8b374007081" Oct 11 05:42:14 crc kubenswrapper[4651]: E1011 05:42:14.870707 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:42:27 crc kubenswrapper[4651]: I1011 05:42:27.871206 4651 scope.go:117] "RemoveContainer" containerID="4f21ee74124734e4ae45f5cd30d3ff201ade9b3ffdf1481c78ccc8b374007081" Oct 11 05:42:27 crc kubenswrapper[4651]: E1011 05:42:27.872253 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:42:38 crc kubenswrapper[4651]: I1011 05:42:38.871168 4651 scope.go:117] "RemoveContainer" containerID="4f21ee74124734e4ae45f5cd30d3ff201ade9b3ffdf1481c78ccc8b374007081" Oct 11 05:42:38 crc kubenswrapper[4651]: E1011 05:42:38.872002 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:42:53 crc kubenswrapper[4651]: I1011 05:42:53.872904 4651 scope.go:117] "RemoveContainer" containerID="4f21ee74124734e4ae45f5cd30d3ff201ade9b3ffdf1481c78ccc8b374007081" Oct 11 05:42:53 crc kubenswrapper[4651]: E1011 05:42:53.874188 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:43:04 crc kubenswrapper[4651]: I1011 05:43:04.870401 4651 scope.go:117] "RemoveContainer" containerID="4f21ee74124734e4ae45f5cd30d3ff201ade9b3ffdf1481c78ccc8b374007081" Oct 11 05:43:04 crc kubenswrapper[4651]: E1011 05:43:04.872482 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:43:18 crc kubenswrapper[4651]: I1011 05:43:18.870516 4651 scope.go:117] "RemoveContainer" 
containerID="4f21ee74124734e4ae45f5cd30d3ff201ade9b3ffdf1481c78ccc8b374007081" Oct 11 05:43:18 crc kubenswrapper[4651]: E1011 05:43:18.871968 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:43:30 crc kubenswrapper[4651]: I1011 05:43:30.870627 4651 scope.go:117] "RemoveContainer" containerID="4f21ee74124734e4ae45f5cd30d3ff201ade9b3ffdf1481c78ccc8b374007081" Oct 11 05:43:30 crc kubenswrapper[4651]: E1011 05:43:30.872189 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:43:44 crc kubenswrapper[4651]: I1011 05:43:44.869166 4651 scope.go:117] "RemoveContainer" containerID="4f21ee74124734e4ae45f5cd30d3ff201ade9b3ffdf1481c78ccc8b374007081" Oct 11 05:43:44 crc kubenswrapper[4651]: E1011 05:43:44.870100 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:43:55 crc kubenswrapper[4651]: I1011 05:43:55.869801 4651 scope.go:117] "RemoveContainer" containerID="4f21ee74124734e4ae45f5cd30d3ff201ade9b3ffdf1481c78ccc8b374007081" Oct 11 05:43:55 crc kubenswrapper[4651]: E1011 05:43:55.870710 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:44:08 crc kubenswrapper[4651]: I1011 05:44:08.870761 4651 scope.go:117] "RemoveContainer" containerID="4f21ee74124734e4ae45f5cd30d3ff201ade9b3ffdf1481c78ccc8b374007081" Oct 11 05:44:08 crc kubenswrapper[4651]: E1011 05:44:08.871993 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:44:19 crc kubenswrapper[4651]: I1011 05:44:19.896023 4651 scope.go:117] "RemoveContainer" containerID="4f21ee74124734e4ae45f5cd30d3ff201ade9b3ffdf1481c78ccc8b374007081" Oct 11 05:44:19 crc kubenswrapper[4651]: E1011 05:44:19.898736 4651 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:44:34 crc kubenswrapper[4651]: I1011 05:44:34.869476 4651 scope.go:117] "RemoveContainer" containerID="4f21ee74124734e4ae45f5cd30d3ff201ade9b3ffdf1481c78ccc8b374007081" Oct 11 05:44:34 crc kubenswrapper[4651]: E1011 05:44:34.870478 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:44:48 crc kubenswrapper[4651]: I1011 05:44:48.869442 4651 scope.go:117] "RemoveContainer" containerID="4f21ee74124734e4ae45f5cd30d3ff201ade9b3ffdf1481c78ccc8b374007081" Oct 11 05:44:48 crc kubenswrapper[4651]: E1011 05:44:48.870223 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:45:00 crc kubenswrapper[4651]: I1011 05:45:00.218207 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29336025-n4pkf"] Oct 11 05:45:00 crc kubenswrapper[4651]: E1011 05:45:00.219252 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3644e06e-bc6e-4b90-935f-15c486493277" containerName="extract-utilities" Oct 11 05:45:00 crc kubenswrapper[4651]: I1011 05:45:00.219268 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="3644e06e-bc6e-4b90-935f-15c486493277" containerName="extract-utilities" Oct 11 05:45:00 crc kubenswrapper[4651]: E1011 05:45:00.219287 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3644e06e-bc6e-4b90-935f-15c486493277" containerName="registry-server" Oct 11 05:45:00 crc kubenswrapper[4651]: I1011 05:45:00.219294 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="3644e06e-bc6e-4b90-935f-15c486493277" containerName="registry-server" Oct 11 05:45:00 crc kubenswrapper[4651]: E1011 05:45:00.219311 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3644e06e-bc6e-4b90-935f-15c486493277" containerName="extract-content" Oct 11 05:45:00 crc kubenswrapper[4651]: I1011 05:45:00.219319 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="3644e06e-bc6e-4b90-935f-15c486493277" containerName="extract-content" Oct 11 05:45:00 crc kubenswrapper[4651]: I1011 05:45:00.219542 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="3644e06e-bc6e-4b90-935f-15c486493277" containerName="registry-server" Oct 11 05:45:00 crc kubenswrapper[4651]: I1011 05:45:00.220313 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29336025-n4pkf" Oct 11 05:45:00 crc kubenswrapper[4651]: I1011 05:45:00.222868 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 11 05:45:00 crc kubenswrapper[4651]: I1011 05:45:00.223285 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 11 05:45:00 crc kubenswrapper[4651]: I1011 05:45:00.235774 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29336025-n4pkf"] Oct 11 05:45:00 crc kubenswrapper[4651]: I1011 05:45:00.336510 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e981cac9-e544-47f6-a2e0-4b087e69e9db-config-volume\") pod \"collect-profiles-29336025-n4pkf\" (UID: \"e981cac9-e544-47f6-a2e0-4b087e69e9db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29336025-n4pkf" Oct 11 05:45:00 crc kubenswrapper[4651]: I1011 05:45:00.336878 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zn96w\" (UniqueName: \"kubernetes.io/projected/e981cac9-e544-47f6-a2e0-4b087e69e9db-kube-api-access-zn96w\") pod \"collect-profiles-29336025-n4pkf\" (UID: \"e981cac9-e544-47f6-a2e0-4b087e69e9db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29336025-n4pkf" Oct 11 05:45:00 crc kubenswrapper[4651]: I1011 05:45:00.336903 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e981cac9-e544-47f6-a2e0-4b087e69e9db-secret-volume\") pod \"collect-profiles-29336025-n4pkf\" (UID: \"e981cac9-e544-47f6-a2e0-4b087e69e9db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29336025-n4pkf" Oct 11 05:45:00 crc kubenswrapper[4651]: I1011 05:45:00.439437 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e981cac9-e544-47f6-a2e0-4b087e69e9db-config-volume\") pod \"collect-profiles-29336025-n4pkf\" (UID: \"e981cac9-e544-47f6-a2e0-4b087e69e9db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29336025-n4pkf" Oct 11 05:45:00 crc kubenswrapper[4651]: I1011 05:45:00.439531 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zn96w\" (UniqueName: \"kubernetes.io/projected/e981cac9-e544-47f6-a2e0-4b087e69e9db-kube-api-access-zn96w\") pod \"collect-profiles-29336025-n4pkf\" (UID: \"e981cac9-e544-47f6-a2e0-4b087e69e9db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29336025-n4pkf" Oct 11 05:45:00 crc kubenswrapper[4651]: I1011 05:45:00.439560 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e981cac9-e544-47f6-a2e0-4b087e69e9db-secret-volume\") pod \"collect-profiles-29336025-n4pkf\" (UID: \"e981cac9-e544-47f6-a2e0-4b087e69e9db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29336025-n4pkf" Oct 11 05:45:00 crc kubenswrapper[4651]: I1011 05:45:00.441271 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e981cac9-e544-47f6-a2e0-4b087e69e9db-config-volume\") pod 
\"collect-profiles-29336025-n4pkf\" (UID: \"e981cac9-e544-47f6-a2e0-4b087e69e9db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29336025-n4pkf" Oct 11 05:45:00 crc kubenswrapper[4651]: I1011 05:45:00.449551 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e981cac9-e544-47f6-a2e0-4b087e69e9db-secret-volume\") pod \"collect-profiles-29336025-n4pkf\" (UID: \"e981cac9-e544-47f6-a2e0-4b087e69e9db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29336025-n4pkf" Oct 11 05:45:00 crc kubenswrapper[4651]: I1011 05:45:00.462071 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zn96w\" (UniqueName: \"kubernetes.io/projected/e981cac9-e544-47f6-a2e0-4b087e69e9db-kube-api-access-zn96w\") pod \"collect-profiles-29336025-n4pkf\" (UID: \"e981cac9-e544-47f6-a2e0-4b087e69e9db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29336025-n4pkf" Oct 11 05:45:00 crc kubenswrapper[4651]: I1011 05:45:00.544428 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29336025-n4pkf" Oct 11 05:45:01 crc kubenswrapper[4651]: I1011 05:45:01.052699 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29336025-n4pkf"] Oct 11 05:45:01 crc kubenswrapper[4651]: W1011 05:45:01.061857 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode981cac9_e544_47f6_a2e0_4b087e69e9db.slice/crio-5a5869978e7de70b9beed847d5b5f23cbfe38db4cdc24848d88cfcebf9323280 WatchSource:0}: Error finding container 5a5869978e7de70b9beed847d5b5f23cbfe38db4cdc24848d88cfcebf9323280: Status 404 returned error can't find the container with id 5a5869978e7de70b9beed847d5b5f23cbfe38db4cdc24848d88cfcebf9323280 Oct 11 05:45:01 crc kubenswrapper[4651]: I1011 05:45:01.422513 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29336025-n4pkf" event={"ID":"e981cac9-e544-47f6-a2e0-4b087e69e9db","Type":"ContainerStarted","Data":"29bc6bb01d2b5ecc5e8410deeeb10f30f35a51741838c6723aeda17514bef258"} Oct 11 05:45:01 crc kubenswrapper[4651]: I1011 05:45:01.423053 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29336025-n4pkf" event={"ID":"e981cac9-e544-47f6-a2e0-4b087e69e9db","Type":"ContainerStarted","Data":"5a5869978e7de70b9beed847d5b5f23cbfe38db4cdc24848d88cfcebf9323280"} Oct 11 05:45:01 crc kubenswrapper[4651]: I1011 05:45:01.453384 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29336025-n4pkf" podStartSLOduration=1.45336383 podStartE2EDuration="1.45336383s" podCreationTimestamp="2025-10-11 05:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:45:01.444382136 +0000 UTC m=+3222.340614942" watchObservedRunningTime="2025-10-11 05:45:01.45336383 +0000 UTC m=+3222.349596636" Oct 11 05:45:02 crc kubenswrapper[4651]: I1011 05:45:02.435934 4651 generic.go:334] "Generic (PLEG): container finished" podID="e981cac9-e544-47f6-a2e0-4b087e69e9db" containerID="29bc6bb01d2b5ecc5e8410deeeb10f30f35a51741838c6723aeda17514bef258" exitCode=0 Oct 11 05:45:02 crc kubenswrapper[4651]: I1011 05:45:02.436029 
4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29336025-n4pkf" event={"ID":"e981cac9-e544-47f6-a2e0-4b087e69e9db","Type":"ContainerDied","Data":"29bc6bb01d2b5ecc5e8410deeeb10f30f35a51741838c6723aeda17514bef258"} Oct 11 05:45:03 crc kubenswrapper[4651]: I1011 05:45:03.870971 4651 scope.go:117] "RemoveContainer" containerID="4f21ee74124734e4ae45f5cd30d3ff201ade9b3ffdf1481c78ccc8b374007081" Oct 11 05:45:03 crc kubenswrapper[4651]: E1011 05:45:03.872232 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:45:03 crc kubenswrapper[4651]: I1011 05:45:03.946716 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29336025-n4pkf" Oct 11 05:45:04 crc kubenswrapper[4651]: I1011 05:45:04.022367 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zn96w\" (UniqueName: \"kubernetes.io/projected/e981cac9-e544-47f6-a2e0-4b087e69e9db-kube-api-access-zn96w\") pod \"e981cac9-e544-47f6-a2e0-4b087e69e9db\" (UID: \"e981cac9-e544-47f6-a2e0-4b087e69e9db\") " Oct 11 05:45:04 crc kubenswrapper[4651]: I1011 05:45:04.022556 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e981cac9-e544-47f6-a2e0-4b087e69e9db-config-volume\") pod \"e981cac9-e544-47f6-a2e0-4b087e69e9db\" (UID: \"e981cac9-e544-47f6-a2e0-4b087e69e9db\") " Oct 11 05:45:04 crc kubenswrapper[4651]: I1011 05:45:04.022626 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e981cac9-e544-47f6-a2e0-4b087e69e9db-secret-volume\") pod \"e981cac9-e544-47f6-a2e0-4b087e69e9db\" (UID: \"e981cac9-e544-47f6-a2e0-4b087e69e9db\") " Oct 11 05:45:04 crc kubenswrapper[4651]: I1011 05:45:04.023519 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e981cac9-e544-47f6-a2e0-4b087e69e9db-config-volume" (OuterVolumeSpecName: "config-volume") pod "e981cac9-e544-47f6-a2e0-4b087e69e9db" (UID: "e981cac9-e544-47f6-a2e0-4b087e69e9db"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:45:04 crc kubenswrapper[4651]: I1011 05:45:04.024098 4651 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e981cac9-e544-47f6-a2e0-4b087e69e9db-config-volume\") on node \"crc\" DevicePath \"\"" Oct 11 05:45:04 crc kubenswrapper[4651]: I1011 05:45:04.032870 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e981cac9-e544-47f6-a2e0-4b087e69e9db-kube-api-access-zn96w" (OuterVolumeSpecName: "kube-api-access-zn96w") pod "e981cac9-e544-47f6-a2e0-4b087e69e9db" (UID: "e981cac9-e544-47f6-a2e0-4b087e69e9db"). InnerVolumeSpecName "kube-api-access-zn96w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:45:04 crc kubenswrapper[4651]: I1011 05:45:04.032972 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e981cac9-e544-47f6-a2e0-4b087e69e9db-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e981cac9-e544-47f6-a2e0-4b087e69e9db" (UID: "e981cac9-e544-47f6-a2e0-4b087e69e9db"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:45:04 crc kubenswrapper[4651]: I1011 05:45:04.126131 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zn96w\" (UniqueName: \"kubernetes.io/projected/e981cac9-e544-47f6-a2e0-4b087e69e9db-kube-api-access-zn96w\") on node \"crc\" DevicePath \"\"" Oct 11 05:45:04 crc kubenswrapper[4651]: I1011 05:45:04.126163 4651 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e981cac9-e544-47f6-a2e0-4b087e69e9db-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 11 05:45:04 crc kubenswrapper[4651]: I1011 05:45:04.463806 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29336025-n4pkf" event={"ID":"e981cac9-e544-47f6-a2e0-4b087e69e9db","Type":"ContainerDied","Data":"5a5869978e7de70b9beed847d5b5f23cbfe38db4cdc24848d88cfcebf9323280"} Oct 11 05:45:04 crc kubenswrapper[4651]: I1011 05:45:04.463880 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5a5869978e7de70b9beed847d5b5f23cbfe38db4cdc24848d88cfcebf9323280" Oct 11 05:45:04 crc kubenswrapper[4651]: I1011 05:45:04.463963 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29336025-n4pkf" Oct 11 05:45:04 crc kubenswrapper[4651]: I1011 05:45:04.547288 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335980-f69sw"] Oct 11 05:45:04 crc kubenswrapper[4651]: I1011 05:45:04.557845 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335980-f69sw"] Oct 11 05:45:05 crc kubenswrapper[4651]: I1011 05:45:05.887695 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8efea488-0eb0-460f-bab5-c818de148253" path="/var/lib/kubelet/pods/8efea488-0eb0-460f-bab5-c818de148253/volumes" Oct 11 05:45:18 crc kubenswrapper[4651]: I1011 05:45:18.871155 4651 scope.go:117] "RemoveContainer" containerID="4f21ee74124734e4ae45f5cd30d3ff201ade9b3ffdf1481c78ccc8b374007081" Oct 11 05:45:19 crc kubenswrapper[4651]: I1011 05:45:19.641510 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" event={"ID":"519a1ae1-e964-48b0-8b61-835146df28c1","Type":"ContainerStarted","Data":"1098930e9e839a15ff635912f8f330c5224821ed8362696a83b86f2f3eaded43"} Oct 11 05:45:35 crc kubenswrapper[4651]: I1011 05:45:35.422993 4651 scope.go:117] "RemoveContainer" containerID="17ca8604931a040d17964403349a1dd93fa8f19eb8708f131e02b8db1b5015d6" Oct 11 05:46:10 crc kubenswrapper[4651]: I1011 05:46:10.276500 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-44nbx"] Oct 11 05:46:10 crc kubenswrapper[4651]: E1011 05:46:10.277701 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e981cac9-e544-47f6-a2e0-4b087e69e9db" containerName="collect-profiles" Oct 11 05:46:10 crc 
kubenswrapper[4651]: I1011 05:46:10.277722 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="e981cac9-e544-47f6-a2e0-4b087e69e9db" containerName="collect-profiles" Oct 11 05:46:10 crc kubenswrapper[4651]: I1011 05:46:10.278234 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="e981cac9-e544-47f6-a2e0-4b087e69e9db" containerName="collect-profiles" Oct 11 05:46:10 crc kubenswrapper[4651]: I1011 05:46:10.280577 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-44nbx" Oct 11 05:46:10 crc kubenswrapper[4651]: I1011 05:46:10.308430 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-44nbx"] Oct 11 05:46:10 crc kubenswrapper[4651]: I1011 05:46:10.361777 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50-catalog-content\") pod \"redhat-marketplace-44nbx\" (UID: \"8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50\") " pod="openshift-marketplace/redhat-marketplace-44nbx" Oct 11 05:46:10 crc kubenswrapper[4651]: I1011 05:46:10.361901 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzd2v\" (UniqueName: \"kubernetes.io/projected/8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50-kube-api-access-lzd2v\") pod \"redhat-marketplace-44nbx\" (UID: \"8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50\") " pod="openshift-marketplace/redhat-marketplace-44nbx" Oct 11 05:46:10 crc kubenswrapper[4651]: I1011 05:46:10.361930 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50-utilities\") pod \"redhat-marketplace-44nbx\" (UID: \"8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50\") " pod="openshift-marketplace/redhat-marketplace-44nbx" Oct 11 05:46:10 crc kubenswrapper[4651]: I1011 05:46:10.463570 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50-catalog-content\") pod \"redhat-marketplace-44nbx\" (UID: \"8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50\") " pod="openshift-marketplace/redhat-marketplace-44nbx" Oct 11 05:46:10 crc kubenswrapper[4651]: I1011 05:46:10.463646 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzd2v\" (UniqueName: \"kubernetes.io/projected/8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50-kube-api-access-lzd2v\") pod \"redhat-marketplace-44nbx\" (UID: \"8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50\") " pod="openshift-marketplace/redhat-marketplace-44nbx" Oct 11 05:46:10 crc kubenswrapper[4651]: I1011 05:46:10.463678 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50-utilities\") pod \"redhat-marketplace-44nbx\" (UID: \"8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50\") " pod="openshift-marketplace/redhat-marketplace-44nbx" Oct 11 05:46:10 crc kubenswrapper[4651]: I1011 05:46:10.464310 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50-catalog-content\") pod \"redhat-marketplace-44nbx\" (UID: \"8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50\") " pod="openshift-marketplace/redhat-marketplace-44nbx" 
Oct 11 05:46:10 crc kubenswrapper[4651]: I1011 05:46:10.464689 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50-utilities\") pod \"redhat-marketplace-44nbx\" (UID: \"8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50\") " pod="openshift-marketplace/redhat-marketplace-44nbx" Oct 11 05:46:10 crc kubenswrapper[4651]: I1011 05:46:10.488999 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzd2v\" (UniqueName: \"kubernetes.io/projected/8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50-kube-api-access-lzd2v\") pod \"redhat-marketplace-44nbx\" (UID: \"8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50\") " pod="openshift-marketplace/redhat-marketplace-44nbx" Oct 11 05:46:10 crc kubenswrapper[4651]: I1011 05:46:10.609413 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-44nbx" Oct 11 05:46:11 crc kubenswrapper[4651]: I1011 05:46:11.099444 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-44nbx"] Oct 11 05:46:11 crc kubenswrapper[4651]: I1011 05:46:11.238032 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-44nbx" event={"ID":"8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50","Type":"ContainerStarted","Data":"ae27c9d91a52445e470f36f9adb10010eb5cd61e6337336a399d97d12393528e"} Oct 11 05:46:12 crc kubenswrapper[4651]: I1011 05:46:12.250318 4651 generic.go:334] "Generic (PLEG): container finished" podID="8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50" containerID="90b9472685f4b6dfc5007f3a8107d2868932d3cca7c3f5e9c4ed2ee08df9eb12" exitCode=0 Oct 11 05:46:12 crc kubenswrapper[4651]: I1011 05:46:12.250428 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-44nbx" event={"ID":"8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50","Type":"ContainerDied","Data":"90b9472685f4b6dfc5007f3a8107d2868932d3cca7c3f5e9c4ed2ee08df9eb12"} Oct 11 05:46:12 crc kubenswrapper[4651]: I1011 05:46:12.252922 4651 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 11 05:46:13 crc kubenswrapper[4651]: I1011 05:46:13.261376 4651 generic.go:334] "Generic (PLEG): container finished" podID="8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50" containerID="8301afb29b3c294b9279361fe7e4ad0bf67a4a7a3d154a2e05defef0e21c3a6f" exitCode=0 Oct 11 05:46:13 crc kubenswrapper[4651]: I1011 05:46:13.261442 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-44nbx" event={"ID":"8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50","Type":"ContainerDied","Data":"8301afb29b3c294b9279361fe7e4ad0bf67a4a7a3d154a2e05defef0e21c3a6f"} Oct 11 05:46:14 crc kubenswrapper[4651]: I1011 05:46:14.276955 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-44nbx" event={"ID":"8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50","Type":"ContainerStarted","Data":"0e7192c3b67d6de2aff3b135379bcc66eb86de450b17741533a088473ec154da"} Oct 11 05:46:14 crc kubenswrapper[4651]: I1011 05:46:14.312173 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-44nbx" podStartSLOduration=2.908226902 podStartE2EDuration="4.31213782s" podCreationTimestamp="2025-10-11 05:46:10 +0000 UTC" firstStartedPulling="2025-10-11 05:46:12.25261852 +0000 UTC m=+3293.148851326" lastFinishedPulling="2025-10-11 05:46:13.656529418 +0000 UTC 
m=+3294.552762244" observedRunningTime="2025-10-11 05:46:14.302084788 +0000 UTC m=+3295.198317604" watchObservedRunningTime="2025-10-11 05:46:14.31213782 +0000 UTC m=+3295.208370656" Oct 11 05:46:20 crc kubenswrapper[4651]: I1011 05:46:20.610062 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-44nbx" Oct 11 05:46:20 crc kubenswrapper[4651]: I1011 05:46:20.611068 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-44nbx" Oct 11 05:46:20 crc kubenswrapper[4651]: I1011 05:46:20.668688 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-44nbx" Oct 11 05:46:21 crc kubenswrapper[4651]: I1011 05:46:21.431696 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-44nbx" Oct 11 05:46:21 crc kubenswrapper[4651]: I1011 05:46:21.488464 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-44nbx"] Oct 11 05:46:23 crc kubenswrapper[4651]: I1011 05:46:23.362981 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-44nbx" podUID="8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50" containerName="registry-server" containerID="cri-o://0e7192c3b67d6de2aff3b135379bcc66eb86de450b17741533a088473ec154da" gracePeriod=2 Oct 11 05:46:24 crc kubenswrapper[4651]: I1011 05:46:24.018320 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-44nbx" Oct 11 05:46:24 crc kubenswrapper[4651]: I1011 05:46:24.135119 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50-catalog-content\") pod \"8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50\" (UID: \"8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50\") " Oct 11 05:46:24 crc kubenswrapper[4651]: I1011 05:46:24.135234 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50-utilities\") pod \"8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50\" (UID: \"8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50\") " Oct 11 05:46:24 crc kubenswrapper[4651]: I1011 05:46:24.135456 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzd2v\" (UniqueName: \"kubernetes.io/projected/8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50-kube-api-access-lzd2v\") pod \"8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50\" (UID: \"8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50\") " Oct 11 05:46:24 crc kubenswrapper[4651]: I1011 05:46:24.136808 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50-utilities" (OuterVolumeSpecName: "utilities") pod "8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50" (UID: "8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:46:24 crc kubenswrapper[4651]: I1011 05:46:24.160036 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50-kube-api-access-lzd2v" (OuterVolumeSpecName: "kube-api-access-lzd2v") pod "8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50" (UID: "8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50"). 
InnerVolumeSpecName "kube-api-access-lzd2v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:46:24 crc kubenswrapper[4651]: I1011 05:46:24.173595 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50" (UID: "8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:46:24 crc kubenswrapper[4651]: I1011 05:46:24.240021 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzd2v\" (UniqueName: \"kubernetes.io/projected/8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50-kube-api-access-lzd2v\") on node \"crc\" DevicePath \"\"" Oct 11 05:46:24 crc kubenswrapper[4651]: I1011 05:46:24.240255 4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 05:46:24 crc kubenswrapper[4651]: I1011 05:46:24.240318 4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 05:46:24 crc kubenswrapper[4651]: I1011 05:46:24.381434 4651 generic.go:334] "Generic (PLEG): container finished" podID="8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50" containerID="0e7192c3b67d6de2aff3b135379bcc66eb86de450b17741533a088473ec154da" exitCode=0 Oct 11 05:46:24 crc kubenswrapper[4651]: I1011 05:46:24.381476 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-44nbx" event={"ID":"8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50","Type":"ContainerDied","Data":"0e7192c3b67d6de2aff3b135379bcc66eb86de450b17741533a088473ec154da"} Oct 11 05:46:24 crc kubenswrapper[4651]: I1011 05:46:24.381534 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-44nbx" event={"ID":"8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50","Type":"ContainerDied","Data":"ae27c9d91a52445e470f36f9adb10010eb5cd61e6337336a399d97d12393528e"} Oct 11 05:46:24 crc kubenswrapper[4651]: I1011 05:46:24.381556 4651 scope.go:117] "RemoveContainer" containerID="0e7192c3b67d6de2aff3b135379bcc66eb86de450b17741533a088473ec154da" Oct 11 05:46:24 crc kubenswrapper[4651]: I1011 05:46:24.383418 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-44nbx" Oct 11 05:46:24 crc kubenswrapper[4651]: I1011 05:46:24.408806 4651 scope.go:117] "RemoveContainer" containerID="8301afb29b3c294b9279361fe7e4ad0bf67a4a7a3d154a2e05defef0e21c3a6f" Oct 11 05:46:24 crc kubenswrapper[4651]: I1011 05:46:24.438232 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-44nbx"] Oct 11 05:46:24 crc kubenswrapper[4651]: I1011 05:46:24.444686 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-44nbx"] Oct 11 05:46:24 crc kubenswrapper[4651]: I1011 05:46:24.453213 4651 scope.go:117] "RemoveContainer" containerID="90b9472685f4b6dfc5007f3a8107d2868932d3cca7c3f5e9c4ed2ee08df9eb12" Oct 11 05:46:24 crc kubenswrapper[4651]: I1011 05:46:24.523408 4651 scope.go:117] "RemoveContainer" containerID="0e7192c3b67d6de2aff3b135379bcc66eb86de450b17741533a088473ec154da" Oct 11 05:46:24 crc kubenswrapper[4651]: E1011 05:46:24.524551 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e7192c3b67d6de2aff3b135379bcc66eb86de450b17741533a088473ec154da\": container with ID starting with 0e7192c3b67d6de2aff3b135379bcc66eb86de450b17741533a088473ec154da not found: ID does not exist" containerID="0e7192c3b67d6de2aff3b135379bcc66eb86de450b17741533a088473ec154da" Oct 11 05:46:24 crc kubenswrapper[4651]: I1011 05:46:24.524617 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e7192c3b67d6de2aff3b135379bcc66eb86de450b17741533a088473ec154da"} err="failed to get container status \"0e7192c3b67d6de2aff3b135379bcc66eb86de450b17741533a088473ec154da\": rpc error: code = NotFound desc = could not find container \"0e7192c3b67d6de2aff3b135379bcc66eb86de450b17741533a088473ec154da\": container with ID starting with 0e7192c3b67d6de2aff3b135379bcc66eb86de450b17741533a088473ec154da not found: ID does not exist" Oct 11 05:46:24 crc kubenswrapper[4651]: I1011 05:46:24.524652 4651 scope.go:117] "RemoveContainer" containerID="8301afb29b3c294b9279361fe7e4ad0bf67a4a7a3d154a2e05defef0e21c3a6f" Oct 11 05:46:24 crc kubenswrapper[4651]: E1011 05:46:24.525037 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8301afb29b3c294b9279361fe7e4ad0bf67a4a7a3d154a2e05defef0e21c3a6f\": container with ID starting with 8301afb29b3c294b9279361fe7e4ad0bf67a4a7a3d154a2e05defef0e21c3a6f not found: ID does not exist" containerID="8301afb29b3c294b9279361fe7e4ad0bf67a4a7a3d154a2e05defef0e21c3a6f" Oct 11 05:46:24 crc kubenswrapper[4651]: I1011 05:46:24.525149 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8301afb29b3c294b9279361fe7e4ad0bf67a4a7a3d154a2e05defef0e21c3a6f"} err="failed to get container status \"8301afb29b3c294b9279361fe7e4ad0bf67a4a7a3d154a2e05defef0e21c3a6f\": rpc error: code = NotFound desc = could not find container \"8301afb29b3c294b9279361fe7e4ad0bf67a4a7a3d154a2e05defef0e21c3a6f\": container with ID starting with 8301afb29b3c294b9279361fe7e4ad0bf67a4a7a3d154a2e05defef0e21c3a6f not found: ID does not exist" Oct 11 05:46:24 crc kubenswrapper[4651]: I1011 05:46:24.525273 4651 scope.go:117] "RemoveContainer" containerID="90b9472685f4b6dfc5007f3a8107d2868932d3cca7c3f5e9c4ed2ee08df9eb12" Oct 11 05:46:24 crc kubenswrapper[4651]: E1011 05:46:24.525628 4651 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"90b9472685f4b6dfc5007f3a8107d2868932d3cca7c3f5e9c4ed2ee08df9eb12\": container with ID starting with 90b9472685f4b6dfc5007f3a8107d2868932d3cca7c3f5e9c4ed2ee08df9eb12 not found: ID does not exist" containerID="90b9472685f4b6dfc5007f3a8107d2868932d3cca7c3f5e9c4ed2ee08df9eb12" Oct 11 05:46:24 crc kubenswrapper[4651]: I1011 05:46:24.525668 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90b9472685f4b6dfc5007f3a8107d2868932d3cca7c3f5e9c4ed2ee08df9eb12"} err="failed to get container status \"90b9472685f4b6dfc5007f3a8107d2868932d3cca7c3f5e9c4ed2ee08df9eb12\": rpc error: code = NotFound desc = could not find container \"90b9472685f4b6dfc5007f3a8107d2868932d3cca7c3f5e9c4ed2ee08df9eb12\": container with ID starting with 90b9472685f4b6dfc5007f3a8107d2868932d3cca7c3f5e9c4ed2ee08df9eb12 not found: ID does not exist" Oct 11 05:46:25 crc kubenswrapper[4651]: I1011 05:46:25.887609 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50" path="/var/lib/kubelet/pods/8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50/volumes" Oct 11 05:47:34 crc kubenswrapper[4651]: I1011 05:47:34.949974 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-62f7l"] Oct 11 05:47:34 crc kubenswrapper[4651]: E1011 05:47:34.951026 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50" containerName="extract-utilities" Oct 11 05:47:34 crc kubenswrapper[4651]: I1011 05:47:34.951046 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50" containerName="extract-utilities" Oct 11 05:47:34 crc kubenswrapper[4651]: E1011 05:47:34.951067 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50" containerName="registry-server" Oct 11 05:47:34 crc kubenswrapper[4651]: I1011 05:47:34.951077 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50" containerName="registry-server" Oct 11 05:47:34 crc kubenswrapper[4651]: E1011 05:47:34.951114 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50" containerName="extract-content" Oct 11 05:47:34 crc kubenswrapper[4651]: I1011 05:47:34.951124 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50" containerName="extract-content" Oct 11 05:47:34 crc kubenswrapper[4651]: I1011 05:47:34.951349 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cbbd9c1-1890-4be6-ac9f-13bb4ab62b50" containerName="registry-server" Oct 11 05:47:34 crc kubenswrapper[4651]: I1011 05:47:34.953108 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-62f7l" Oct 11 05:47:34 crc kubenswrapper[4651]: I1011 05:47:34.969000 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-62f7l"] Oct 11 05:47:35 crc kubenswrapper[4651]: I1011 05:47:35.111088 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1958c7ba-76fb-47f8-b154-d114bdead35f-utilities\") pod \"redhat-operators-62f7l\" (UID: \"1958c7ba-76fb-47f8-b154-d114bdead35f\") " pod="openshift-marketplace/redhat-operators-62f7l" Oct 11 05:47:35 crc kubenswrapper[4651]: I1011 05:47:35.111226 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1958c7ba-76fb-47f8-b154-d114bdead35f-catalog-content\") pod \"redhat-operators-62f7l\" (UID: \"1958c7ba-76fb-47f8-b154-d114bdead35f\") " pod="openshift-marketplace/redhat-operators-62f7l" Oct 11 05:47:35 crc kubenswrapper[4651]: I1011 05:47:35.111517 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cw826\" (UniqueName: \"kubernetes.io/projected/1958c7ba-76fb-47f8-b154-d114bdead35f-kube-api-access-cw826\") pod \"redhat-operators-62f7l\" (UID: \"1958c7ba-76fb-47f8-b154-d114bdead35f\") " pod="openshift-marketplace/redhat-operators-62f7l" Oct 11 05:47:35 crc kubenswrapper[4651]: I1011 05:47:35.214718 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cw826\" (UniqueName: \"kubernetes.io/projected/1958c7ba-76fb-47f8-b154-d114bdead35f-kube-api-access-cw826\") pod \"redhat-operators-62f7l\" (UID: \"1958c7ba-76fb-47f8-b154-d114bdead35f\") " pod="openshift-marketplace/redhat-operators-62f7l" Oct 11 05:47:35 crc kubenswrapper[4651]: I1011 05:47:35.215002 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1958c7ba-76fb-47f8-b154-d114bdead35f-utilities\") pod \"redhat-operators-62f7l\" (UID: \"1958c7ba-76fb-47f8-b154-d114bdead35f\") " pod="openshift-marketplace/redhat-operators-62f7l" Oct 11 05:47:35 crc kubenswrapper[4651]: I1011 05:47:35.215148 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1958c7ba-76fb-47f8-b154-d114bdead35f-catalog-content\") pod \"redhat-operators-62f7l\" (UID: \"1958c7ba-76fb-47f8-b154-d114bdead35f\") " pod="openshift-marketplace/redhat-operators-62f7l" Oct 11 05:47:35 crc kubenswrapper[4651]: I1011 05:47:35.216108 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1958c7ba-76fb-47f8-b154-d114bdead35f-utilities\") pod \"redhat-operators-62f7l\" (UID: \"1958c7ba-76fb-47f8-b154-d114bdead35f\") " pod="openshift-marketplace/redhat-operators-62f7l" Oct 11 05:47:35 crc kubenswrapper[4651]: I1011 05:47:35.216481 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1958c7ba-76fb-47f8-b154-d114bdead35f-catalog-content\") pod \"redhat-operators-62f7l\" (UID: \"1958c7ba-76fb-47f8-b154-d114bdead35f\") " pod="openshift-marketplace/redhat-operators-62f7l" Oct 11 05:47:35 crc kubenswrapper[4651]: I1011 05:47:35.244286 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-cw826\" (UniqueName: \"kubernetes.io/projected/1958c7ba-76fb-47f8-b154-d114bdead35f-kube-api-access-cw826\") pod \"redhat-operators-62f7l\" (UID: \"1958c7ba-76fb-47f8-b154-d114bdead35f\") " pod="openshift-marketplace/redhat-operators-62f7l" Oct 11 05:47:35 crc kubenswrapper[4651]: I1011 05:47:35.290311 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-62f7l" Oct 11 05:47:35 crc kubenswrapper[4651]: I1011 05:47:35.633102 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-62f7l"] Oct 11 05:47:36 crc kubenswrapper[4651]: I1011 05:47:36.262887 4651 generic.go:334] "Generic (PLEG): container finished" podID="1958c7ba-76fb-47f8-b154-d114bdead35f" containerID="aafc52bc059f63c32a4f17ef4f29616362caf5af0cbe21746860d550e360ac2b" exitCode=0 Oct 11 05:47:36 crc kubenswrapper[4651]: I1011 05:47:36.262952 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-62f7l" event={"ID":"1958c7ba-76fb-47f8-b154-d114bdead35f","Type":"ContainerDied","Data":"aafc52bc059f63c32a4f17ef4f29616362caf5af0cbe21746860d550e360ac2b"} Oct 11 05:47:36 crc kubenswrapper[4651]: I1011 05:47:36.263030 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-62f7l" event={"ID":"1958c7ba-76fb-47f8-b154-d114bdead35f","Type":"ContainerStarted","Data":"402af08bfa245b1409a3ed74eac9adbd8976d6bdc9489c0be8e623adc21e2caa"} Oct 11 05:47:37 crc kubenswrapper[4651]: I1011 05:47:37.279603 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-62f7l" event={"ID":"1958c7ba-76fb-47f8-b154-d114bdead35f","Type":"ContainerStarted","Data":"e93c8a0174ce061be7e8592309ef198b00f0dd841755086aa7ed2587cbaa65ef"} Oct 11 05:47:39 crc kubenswrapper[4651]: I1011 05:47:39.303277 4651 generic.go:334] "Generic (PLEG): container finished" podID="1958c7ba-76fb-47f8-b154-d114bdead35f" containerID="e93c8a0174ce061be7e8592309ef198b00f0dd841755086aa7ed2587cbaa65ef" exitCode=0 Oct 11 05:47:39 crc kubenswrapper[4651]: I1011 05:47:39.303407 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-62f7l" event={"ID":"1958c7ba-76fb-47f8-b154-d114bdead35f","Type":"ContainerDied","Data":"e93c8a0174ce061be7e8592309ef198b00f0dd841755086aa7ed2587cbaa65ef"} Oct 11 05:47:40 crc kubenswrapper[4651]: I1011 05:47:40.316462 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-62f7l" event={"ID":"1958c7ba-76fb-47f8-b154-d114bdead35f","Type":"ContainerStarted","Data":"b81dc0be3e08033d5b3b26f0c537168a86ce8544995af40acc7ec04456aa510b"} Oct 11 05:47:40 crc kubenswrapper[4651]: I1011 05:47:40.352723 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-62f7l" podStartSLOduration=2.9088149210000003 podStartE2EDuration="6.352703501s" podCreationTimestamp="2025-10-11 05:47:34 +0000 UTC" firstStartedPulling="2025-10-11 05:47:36.267341887 +0000 UTC m=+3377.163574683" lastFinishedPulling="2025-10-11 05:47:39.711230427 +0000 UTC m=+3380.607463263" observedRunningTime="2025-10-11 05:47:40.343755078 +0000 UTC m=+3381.239987924" watchObservedRunningTime="2025-10-11 05:47:40.352703501 +0000 UTC m=+3381.248936307" Oct 11 05:47:45 crc kubenswrapper[4651]: I1011 05:47:45.291621 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-operators-62f7l" Oct 11 05:47:45 crc kubenswrapper[4651]: I1011 05:47:45.294073 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-62f7l" Oct 11 05:47:46 crc kubenswrapper[4651]: I1011 05:47:46.310535 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 05:47:46 crc kubenswrapper[4651]: I1011 05:47:46.310625 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 05:47:46 crc kubenswrapper[4651]: I1011 05:47:46.361219 4651 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-62f7l" podUID="1958c7ba-76fb-47f8-b154-d114bdead35f" containerName="registry-server" probeResult="failure" output=< Oct 11 05:47:46 crc kubenswrapper[4651]: timeout: failed to connect service ":50051" within 1s Oct 11 05:47:46 crc kubenswrapper[4651]: > Oct 11 05:47:49 crc kubenswrapper[4651]: I1011 05:47:49.342282 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-lbnkg"] Oct 11 05:47:49 crc kubenswrapper[4651]: I1011 05:47:49.348662 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lbnkg" Oct 11 05:47:49 crc kubenswrapper[4651]: I1011 05:47:49.375884 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lbnkg"] Oct 11 05:47:49 crc kubenswrapper[4651]: I1011 05:47:49.515904 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4544dbff-3a0a-4eb3-94f2-55961043d4d6-utilities\") pod \"community-operators-lbnkg\" (UID: \"4544dbff-3a0a-4eb3-94f2-55961043d4d6\") " pod="openshift-marketplace/community-operators-lbnkg" Oct 11 05:47:49 crc kubenswrapper[4651]: I1011 05:47:49.516126 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4544dbff-3a0a-4eb3-94f2-55961043d4d6-catalog-content\") pod \"community-operators-lbnkg\" (UID: \"4544dbff-3a0a-4eb3-94f2-55961043d4d6\") " pod="openshift-marketplace/community-operators-lbnkg" Oct 11 05:47:49 crc kubenswrapper[4651]: I1011 05:47:49.516219 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tptj2\" (UniqueName: \"kubernetes.io/projected/4544dbff-3a0a-4eb3-94f2-55961043d4d6-kube-api-access-tptj2\") pod \"community-operators-lbnkg\" (UID: \"4544dbff-3a0a-4eb3-94f2-55961043d4d6\") " pod="openshift-marketplace/community-operators-lbnkg" Oct 11 05:47:49 crc kubenswrapper[4651]: I1011 05:47:49.617771 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4544dbff-3a0a-4eb3-94f2-55961043d4d6-catalog-content\") pod \"community-operators-lbnkg\" (UID: \"4544dbff-3a0a-4eb3-94f2-55961043d4d6\") " 
pod="openshift-marketplace/community-operators-lbnkg" Oct 11 05:47:49 crc kubenswrapper[4651]: I1011 05:47:49.617891 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tptj2\" (UniqueName: \"kubernetes.io/projected/4544dbff-3a0a-4eb3-94f2-55961043d4d6-kube-api-access-tptj2\") pod \"community-operators-lbnkg\" (UID: \"4544dbff-3a0a-4eb3-94f2-55961043d4d6\") " pod="openshift-marketplace/community-operators-lbnkg" Oct 11 05:47:49 crc kubenswrapper[4651]: I1011 05:47:49.617989 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4544dbff-3a0a-4eb3-94f2-55961043d4d6-utilities\") pod \"community-operators-lbnkg\" (UID: \"4544dbff-3a0a-4eb3-94f2-55961043d4d6\") " pod="openshift-marketplace/community-operators-lbnkg" Oct 11 05:47:49 crc kubenswrapper[4651]: I1011 05:47:49.618483 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4544dbff-3a0a-4eb3-94f2-55961043d4d6-catalog-content\") pod \"community-operators-lbnkg\" (UID: \"4544dbff-3a0a-4eb3-94f2-55961043d4d6\") " pod="openshift-marketplace/community-operators-lbnkg" Oct 11 05:47:49 crc kubenswrapper[4651]: I1011 05:47:49.618624 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4544dbff-3a0a-4eb3-94f2-55961043d4d6-utilities\") pod \"community-operators-lbnkg\" (UID: \"4544dbff-3a0a-4eb3-94f2-55961043d4d6\") " pod="openshift-marketplace/community-operators-lbnkg" Oct 11 05:47:49 crc kubenswrapper[4651]: I1011 05:47:49.641671 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tptj2\" (UniqueName: \"kubernetes.io/projected/4544dbff-3a0a-4eb3-94f2-55961043d4d6-kube-api-access-tptj2\") pod \"community-operators-lbnkg\" (UID: \"4544dbff-3a0a-4eb3-94f2-55961043d4d6\") " pod="openshift-marketplace/community-operators-lbnkg" Oct 11 05:47:49 crc kubenswrapper[4651]: I1011 05:47:49.710916 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-lbnkg" Oct 11 05:47:50 crc kubenswrapper[4651]: I1011 05:47:50.219886 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lbnkg"] Oct 11 05:47:50 crc kubenswrapper[4651]: I1011 05:47:50.451200 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lbnkg" event={"ID":"4544dbff-3a0a-4eb3-94f2-55961043d4d6","Type":"ContainerStarted","Data":"5c1173f3a6718d2a6a209e0e1b580a5bde3c1484092c29020b1055697703eedf"} Oct 11 05:47:51 crc kubenswrapper[4651]: I1011 05:47:51.465427 4651 generic.go:334] "Generic (PLEG): container finished" podID="4544dbff-3a0a-4eb3-94f2-55961043d4d6" containerID="93300f0f3a8a397b64a4f628888e2cb3520c7f913297832e85e0998ff66664b5" exitCode=0 Oct 11 05:47:51 crc kubenswrapper[4651]: I1011 05:47:51.465477 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lbnkg" event={"ID":"4544dbff-3a0a-4eb3-94f2-55961043d4d6","Type":"ContainerDied","Data":"93300f0f3a8a397b64a4f628888e2cb3520c7f913297832e85e0998ff66664b5"} Oct 11 05:47:52 crc kubenswrapper[4651]: I1011 05:47:52.476283 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lbnkg" event={"ID":"4544dbff-3a0a-4eb3-94f2-55961043d4d6","Type":"ContainerStarted","Data":"2e22fc57dfaf7c3e38a0a96602587e13eb755845d304cb7a319152f275e3e217"} Oct 11 05:47:53 crc kubenswrapper[4651]: I1011 05:47:53.492918 4651 generic.go:334] "Generic (PLEG): container finished" podID="4544dbff-3a0a-4eb3-94f2-55961043d4d6" containerID="2e22fc57dfaf7c3e38a0a96602587e13eb755845d304cb7a319152f275e3e217" exitCode=0 Oct 11 05:47:53 crc kubenswrapper[4651]: I1011 05:47:53.492991 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lbnkg" event={"ID":"4544dbff-3a0a-4eb3-94f2-55961043d4d6","Type":"ContainerDied","Data":"2e22fc57dfaf7c3e38a0a96602587e13eb755845d304cb7a319152f275e3e217"} Oct 11 05:47:54 crc kubenswrapper[4651]: I1011 05:47:54.519177 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lbnkg" event={"ID":"4544dbff-3a0a-4eb3-94f2-55961043d4d6","Type":"ContainerStarted","Data":"c71921a96e8dd0eb42253cf1d3f78b12f9f624df4ba5adc2a3884b422093d8cb"} Oct 11 05:47:54 crc kubenswrapper[4651]: I1011 05:47:54.545542 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-lbnkg" podStartSLOduration=3.127488608 podStartE2EDuration="5.545495679s" podCreationTimestamp="2025-10-11 05:47:49 +0000 UTC" firstStartedPulling="2025-10-11 05:47:51.468209001 +0000 UTC m=+3392.364441797" lastFinishedPulling="2025-10-11 05:47:53.886216072 +0000 UTC m=+3394.782448868" observedRunningTime="2025-10-11 05:47:54.535961641 +0000 UTC m=+3395.432194447" watchObservedRunningTime="2025-10-11 05:47:54.545495679 +0000 UTC m=+3395.441728485" Oct 11 05:47:55 crc kubenswrapper[4651]: I1011 05:47:55.373432 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-62f7l" Oct 11 05:47:55 crc kubenswrapper[4651]: I1011 05:47:55.428059 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-62f7l" Oct 11 05:47:56 crc kubenswrapper[4651]: I1011 05:47:56.705863 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/redhat-operators-62f7l"] Oct 11 05:47:56 crc kubenswrapper[4651]: I1011 05:47:56.706717 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-62f7l" podUID="1958c7ba-76fb-47f8-b154-d114bdead35f" containerName="registry-server" containerID="cri-o://b81dc0be3e08033d5b3b26f0c537168a86ce8544995af40acc7ec04456aa510b" gracePeriod=2 Oct 11 05:47:57 crc kubenswrapper[4651]: I1011 05:47:57.253877 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-62f7l" Oct 11 05:47:57 crc kubenswrapper[4651]: I1011 05:47:57.376787 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cw826\" (UniqueName: \"kubernetes.io/projected/1958c7ba-76fb-47f8-b154-d114bdead35f-kube-api-access-cw826\") pod \"1958c7ba-76fb-47f8-b154-d114bdead35f\" (UID: \"1958c7ba-76fb-47f8-b154-d114bdead35f\") " Oct 11 05:47:57 crc kubenswrapper[4651]: I1011 05:47:57.377916 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1958c7ba-76fb-47f8-b154-d114bdead35f-catalog-content\") pod \"1958c7ba-76fb-47f8-b154-d114bdead35f\" (UID: \"1958c7ba-76fb-47f8-b154-d114bdead35f\") " Oct 11 05:47:57 crc kubenswrapper[4651]: I1011 05:47:57.378198 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1958c7ba-76fb-47f8-b154-d114bdead35f-utilities\") pod \"1958c7ba-76fb-47f8-b154-d114bdead35f\" (UID: \"1958c7ba-76fb-47f8-b154-d114bdead35f\") " Oct 11 05:47:57 crc kubenswrapper[4651]: I1011 05:47:57.379004 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1958c7ba-76fb-47f8-b154-d114bdead35f-utilities" (OuterVolumeSpecName: "utilities") pod "1958c7ba-76fb-47f8-b154-d114bdead35f" (UID: "1958c7ba-76fb-47f8-b154-d114bdead35f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:47:57 crc kubenswrapper[4651]: I1011 05:47:57.383181 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1958c7ba-76fb-47f8-b154-d114bdead35f-kube-api-access-cw826" (OuterVolumeSpecName: "kube-api-access-cw826") pod "1958c7ba-76fb-47f8-b154-d114bdead35f" (UID: "1958c7ba-76fb-47f8-b154-d114bdead35f"). InnerVolumeSpecName "kube-api-access-cw826". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:47:57 crc kubenswrapper[4651]: I1011 05:47:57.470773 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1958c7ba-76fb-47f8-b154-d114bdead35f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1958c7ba-76fb-47f8-b154-d114bdead35f" (UID: "1958c7ba-76fb-47f8-b154-d114bdead35f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:47:57 crc kubenswrapper[4651]: I1011 05:47:57.480501 4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1958c7ba-76fb-47f8-b154-d114bdead35f-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 05:47:57 crc kubenswrapper[4651]: I1011 05:47:57.480536 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cw826\" (UniqueName: \"kubernetes.io/projected/1958c7ba-76fb-47f8-b154-d114bdead35f-kube-api-access-cw826\") on node \"crc\" DevicePath \"\"" Oct 11 05:47:57 crc kubenswrapper[4651]: I1011 05:47:57.480548 4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1958c7ba-76fb-47f8-b154-d114bdead35f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 05:47:57 crc kubenswrapper[4651]: I1011 05:47:57.548320 4651 generic.go:334] "Generic (PLEG): container finished" podID="1958c7ba-76fb-47f8-b154-d114bdead35f" containerID="b81dc0be3e08033d5b3b26f0c537168a86ce8544995af40acc7ec04456aa510b" exitCode=0 Oct 11 05:47:57 crc kubenswrapper[4651]: I1011 05:47:57.548408 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-62f7l" event={"ID":"1958c7ba-76fb-47f8-b154-d114bdead35f","Type":"ContainerDied","Data":"b81dc0be3e08033d5b3b26f0c537168a86ce8544995af40acc7ec04456aa510b"} Oct 11 05:47:57 crc kubenswrapper[4651]: I1011 05:47:57.548510 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-62f7l" event={"ID":"1958c7ba-76fb-47f8-b154-d114bdead35f","Type":"ContainerDied","Data":"402af08bfa245b1409a3ed74eac9adbd8976d6bdc9489c0be8e623adc21e2caa"} Oct 11 05:47:57 crc kubenswrapper[4651]: I1011 05:47:57.548540 4651 scope.go:117] "RemoveContainer" containerID="b81dc0be3e08033d5b3b26f0c537168a86ce8544995af40acc7ec04456aa510b" Oct 11 05:47:57 crc kubenswrapper[4651]: I1011 05:47:57.548441 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-62f7l" Oct 11 05:47:57 crc kubenswrapper[4651]: I1011 05:47:57.584125 4651 scope.go:117] "RemoveContainer" containerID="e93c8a0174ce061be7e8592309ef198b00f0dd841755086aa7ed2587cbaa65ef" Oct 11 05:47:57 crc kubenswrapper[4651]: I1011 05:47:57.585071 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-62f7l"] Oct 11 05:47:57 crc kubenswrapper[4651]: I1011 05:47:57.594031 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-62f7l"] Oct 11 05:47:57 crc kubenswrapper[4651]: I1011 05:47:57.609187 4651 scope.go:117] "RemoveContainer" containerID="aafc52bc059f63c32a4f17ef4f29616362caf5af0cbe21746860d550e360ac2b" Oct 11 05:47:57 crc kubenswrapper[4651]: I1011 05:47:57.653578 4651 scope.go:117] "RemoveContainer" containerID="b81dc0be3e08033d5b3b26f0c537168a86ce8544995af40acc7ec04456aa510b" Oct 11 05:47:57 crc kubenswrapper[4651]: E1011 05:47:57.654111 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b81dc0be3e08033d5b3b26f0c537168a86ce8544995af40acc7ec04456aa510b\": container with ID starting with b81dc0be3e08033d5b3b26f0c537168a86ce8544995af40acc7ec04456aa510b not found: ID does not exist" containerID="b81dc0be3e08033d5b3b26f0c537168a86ce8544995af40acc7ec04456aa510b" Oct 11 05:47:57 crc kubenswrapper[4651]: I1011 05:47:57.654168 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b81dc0be3e08033d5b3b26f0c537168a86ce8544995af40acc7ec04456aa510b"} err="failed to get container status \"b81dc0be3e08033d5b3b26f0c537168a86ce8544995af40acc7ec04456aa510b\": rpc error: code = NotFound desc = could not find container \"b81dc0be3e08033d5b3b26f0c537168a86ce8544995af40acc7ec04456aa510b\": container with ID starting with b81dc0be3e08033d5b3b26f0c537168a86ce8544995af40acc7ec04456aa510b not found: ID does not exist" Oct 11 05:47:57 crc kubenswrapper[4651]: I1011 05:47:57.654192 4651 scope.go:117] "RemoveContainer" containerID="e93c8a0174ce061be7e8592309ef198b00f0dd841755086aa7ed2587cbaa65ef" Oct 11 05:47:57 crc kubenswrapper[4651]: E1011 05:47:57.654544 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e93c8a0174ce061be7e8592309ef198b00f0dd841755086aa7ed2587cbaa65ef\": container with ID starting with e93c8a0174ce061be7e8592309ef198b00f0dd841755086aa7ed2587cbaa65ef not found: ID does not exist" containerID="e93c8a0174ce061be7e8592309ef198b00f0dd841755086aa7ed2587cbaa65ef" Oct 11 05:47:57 crc kubenswrapper[4651]: I1011 05:47:57.654570 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e93c8a0174ce061be7e8592309ef198b00f0dd841755086aa7ed2587cbaa65ef"} err="failed to get container status \"e93c8a0174ce061be7e8592309ef198b00f0dd841755086aa7ed2587cbaa65ef\": rpc error: code = NotFound desc = could not find container \"e93c8a0174ce061be7e8592309ef198b00f0dd841755086aa7ed2587cbaa65ef\": container with ID starting with e93c8a0174ce061be7e8592309ef198b00f0dd841755086aa7ed2587cbaa65ef not found: ID does not exist" Oct 11 05:47:57 crc kubenswrapper[4651]: I1011 05:47:57.654586 4651 scope.go:117] "RemoveContainer" containerID="aafc52bc059f63c32a4f17ef4f29616362caf5af0cbe21746860d550e360ac2b" Oct 11 05:47:57 crc kubenswrapper[4651]: E1011 05:47:57.654871 4651 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"aafc52bc059f63c32a4f17ef4f29616362caf5af0cbe21746860d550e360ac2b\": container with ID starting with aafc52bc059f63c32a4f17ef4f29616362caf5af0cbe21746860d550e360ac2b not found: ID does not exist" containerID="aafc52bc059f63c32a4f17ef4f29616362caf5af0cbe21746860d550e360ac2b" Oct 11 05:47:57 crc kubenswrapper[4651]: I1011 05:47:57.654898 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aafc52bc059f63c32a4f17ef4f29616362caf5af0cbe21746860d550e360ac2b"} err="failed to get container status \"aafc52bc059f63c32a4f17ef4f29616362caf5af0cbe21746860d550e360ac2b\": rpc error: code = NotFound desc = could not find container \"aafc52bc059f63c32a4f17ef4f29616362caf5af0cbe21746860d550e360ac2b\": container with ID starting with aafc52bc059f63c32a4f17ef4f29616362caf5af0cbe21746860d550e360ac2b not found: ID does not exist" Oct 11 05:47:57 crc kubenswrapper[4651]: I1011 05:47:57.886550 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1958c7ba-76fb-47f8-b154-d114bdead35f" path="/var/lib/kubelet/pods/1958c7ba-76fb-47f8-b154-d114bdead35f/volumes" Oct 11 05:47:59 crc kubenswrapper[4651]: I1011 05:47:59.711290 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-lbnkg" Oct 11 05:47:59 crc kubenswrapper[4651]: I1011 05:47:59.711402 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-lbnkg" Oct 11 05:47:59 crc kubenswrapper[4651]: I1011 05:47:59.786544 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-lbnkg" Oct 11 05:48:00 crc kubenswrapper[4651]: I1011 05:48:00.641553 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-lbnkg" Oct 11 05:48:00 crc kubenswrapper[4651]: I1011 05:48:00.910189 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lbnkg"] Oct 11 05:48:02 crc kubenswrapper[4651]: I1011 05:48:02.611645 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-lbnkg" podUID="4544dbff-3a0a-4eb3-94f2-55961043d4d6" containerName="registry-server" containerID="cri-o://c71921a96e8dd0eb42253cf1d3f78b12f9f624df4ba5adc2a3884b422093d8cb" gracePeriod=2 Oct 11 05:48:03 crc kubenswrapper[4651]: I1011 05:48:03.627330 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lbnkg" event={"ID":"4544dbff-3a0a-4eb3-94f2-55961043d4d6","Type":"ContainerDied","Data":"c71921a96e8dd0eb42253cf1d3f78b12f9f624df4ba5adc2a3884b422093d8cb"} Oct 11 05:48:03 crc kubenswrapper[4651]: I1011 05:48:03.627278 4651 generic.go:334] "Generic (PLEG): container finished" podID="4544dbff-3a0a-4eb3-94f2-55961043d4d6" containerID="c71921a96e8dd0eb42253cf1d3f78b12f9f624df4ba5adc2a3884b422093d8cb" exitCode=0 Oct 11 05:48:03 crc kubenswrapper[4651]: I1011 05:48:03.627947 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lbnkg" event={"ID":"4544dbff-3a0a-4eb3-94f2-55961043d4d6","Type":"ContainerDied","Data":"5c1173f3a6718d2a6a209e0e1b580a5bde3c1484092c29020b1055697703eedf"} Oct 11 05:48:03 crc kubenswrapper[4651]: I1011 05:48:03.627978 4651 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="5c1173f3a6718d2a6a209e0e1b580a5bde3c1484092c29020b1055697703eedf" Oct 11 05:48:03 crc kubenswrapper[4651]: I1011 05:48:03.730155 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lbnkg" Oct 11 05:48:03 crc kubenswrapper[4651]: I1011 05:48:03.835915 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4544dbff-3a0a-4eb3-94f2-55961043d4d6-catalog-content\") pod \"4544dbff-3a0a-4eb3-94f2-55961043d4d6\" (UID: \"4544dbff-3a0a-4eb3-94f2-55961043d4d6\") " Oct 11 05:48:03 crc kubenswrapper[4651]: I1011 05:48:03.836009 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4544dbff-3a0a-4eb3-94f2-55961043d4d6-utilities\") pod \"4544dbff-3a0a-4eb3-94f2-55961043d4d6\" (UID: \"4544dbff-3a0a-4eb3-94f2-55961043d4d6\") " Oct 11 05:48:03 crc kubenswrapper[4651]: I1011 05:48:03.837082 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tptj2\" (UniqueName: \"kubernetes.io/projected/4544dbff-3a0a-4eb3-94f2-55961043d4d6-kube-api-access-tptj2\") pod \"4544dbff-3a0a-4eb3-94f2-55961043d4d6\" (UID: \"4544dbff-3a0a-4eb3-94f2-55961043d4d6\") " Oct 11 05:48:03 crc kubenswrapper[4651]: I1011 05:48:03.837654 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4544dbff-3a0a-4eb3-94f2-55961043d4d6-utilities" (OuterVolumeSpecName: "utilities") pod "4544dbff-3a0a-4eb3-94f2-55961043d4d6" (UID: "4544dbff-3a0a-4eb3-94f2-55961043d4d6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:48:03 crc kubenswrapper[4651]: I1011 05:48:03.838007 4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4544dbff-3a0a-4eb3-94f2-55961043d4d6-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 05:48:03 crc kubenswrapper[4651]: I1011 05:48:03.846254 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4544dbff-3a0a-4eb3-94f2-55961043d4d6-kube-api-access-tptj2" (OuterVolumeSpecName: "kube-api-access-tptj2") pod "4544dbff-3a0a-4eb3-94f2-55961043d4d6" (UID: "4544dbff-3a0a-4eb3-94f2-55961043d4d6"). InnerVolumeSpecName "kube-api-access-tptj2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:48:03 crc kubenswrapper[4651]: I1011 05:48:03.917304 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4544dbff-3a0a-4eb3-94f2-55961043d4d6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4544dbff-3a0a-4eb3-94f2-55961043d4d6" (UID: "4544dbff-3a0a-4eb3-94f2-55961043d4d6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:48:03 crc kubenswrapper[4651]: I1011 05:48:03.940685 4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4544dbff-3a0a-4eb3-94f2-55961043d4d6-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 05:48:03 crc kubenswrapper[4651]: I1011 05:48:03.941733 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tptj2\" (UniqueName: \"kubernetes.io/projected/4544dbff-3a0a-4eb3-94f2-55961043d4d6-kube-api-access-tptj2\") on node \"crc\" DevicePath \"\"" Oct 11 05:48:04 crc kubenswrapper[4651]: I1011 05:48:04.639049 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lbnkg" Oct 11 05:48:04 crc kubenswrapper[4651]: I1011 05:48:04.686846 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lbnkg"] Oct 11 05:48:04 crc kubenswrapper[4651]: I1011 05:48:04.701515 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-lbnkg"] Oct 11 05:48:05 crc kubenswrapper[4651]: I1011 05:48:05.883415 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4544dbff-3a0a-4eb3-94f2-55961043d4d6" path="/var/lib/kubelet/pods/4544dbff-3a0a-4eb3-94f2-55961043d4d6/volumes" Oct 11 05:48:16 crc kubenswrapper[4651]: I1011 05:48:16.310927 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 05:48:16 crc kubenswrapper[4651]: I1011 05:48:16.311537 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 05:48:46 crc kubenswrapper[4651]: I1011 05:48:46.310533 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 05:48:46 crc kubenswrapper[4651]: I1011 05:48:46.311172 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 05:48:46 crc kubenswrapper[4651]: I1011 05:48:46.311236 4651 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" Oct 11 05:48:46 crc kubenswrapper[4651]: I1011 05:48:46.312229 4651 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1098930e9e839a15ff635912f8f330c5224821ed8362696a83b86f2f3eaded43"} pod="openshift-machine-config-operator/machine-config-daemon-78jnv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 11 05:48:46 
crc kubenswrapper[4651]: I1011 05:48:46.312314 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" containerID="cri-o://1098930e9e839a15ff635912f8f330c5224821ed8362696a83b86f2f3eaded43" gracePeriod=600 Oct 11 05:48:47 crc kubenswrapper[4651]: I1011 05:48:47.124750 4651 generic.go:334] "Generic (PLEG): container finished" podID="519a1ae1-e964-48b0-8b61-835146df28c1" containerID="1098930e9e839a15ff635912f8f330c5224821ed8362696a83b86f2f3eaded43" exitCode=0 Oct 11 05:48:47 crc kubenswrapper[4651]: I1011 05:48:47.124861 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" event={"ID":"519a1ae1-e964-48b0-8b61-835146df28c1","Type":"ContainerDied","Data":"1098930e9e839a15ff635912f8f330c5224821ed8362696a83b86f2f3eaded43"} Oct 11 05:48:47 crc kubenswrapper[4651]: I1011 05:48:47.125547 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" event={"ID":"519a1ae1-e964-48b0-8b61-835146df28c1","Type":"ContainerStarted","Data":"c912b3bc11354e42d7455ad2000403f0825b71426687440ea00315847957d4d3"} Oct 11 05:48:47 crc kubenswrapper[4651]: I1011 05:48:47.125584 4651 scope.go:117] "RemoveContainer" containerID="4f21ee74124734e4ae45f5cd30d3ff201ade9b3ffdf1481c78ccc8b374007081" Oct 11 05:50:13 crc kubenswrapper[4651]: I1011 05:50:13.142061 4651 generic.go:334] "Generic (PLEG): container finished" podID="76b8d472-5f4e-4d97-be15-0f5be51acd85" containerID="9892b94c05e15011ecc833b92d8df53c89af4075a96bcb6f9de3b6ff25cabe82" exitCode=0 Oct 11 05:50:13 crc kubenswrapper[4651]: I1011 05:50:13.142195 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"76b8d472-5f4e-4d97-be15-0f5be51acd85","Type":"ContainerDied","Data":"9892b94c05e15011ecc833b92d8df53c89af4075a96bcb6f9de3b6ff25cabe82"} Oct 11 05:50:14 crc kubenswrapper[4651]: I1011 05:50:14.662711 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 11 05:50:14 crc kubenswrapper[4651]: I1011 05:50:14.762339 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"76b8d472-5f4e-4d97-be15-0f5be51acd85\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " Oct 11 05:50:14 crc kubenswrapper[4651]: I1011 05:50:14.762491 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/76b8d472-5f4e-4d97-be15-0f5be51acd85-openstack-config-secret\") pod \"76b8d472-5f4e-4d97-be15-0f5be51acd85\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " Oct 11 05:50:14 crc kubenswrapper[4651]: I1011 05:50:14.762548 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/76b8d472-5f4e-4d97-be15-0f5be51acd85-test-operator-ephemeral-workdir\") pod \"76b8d472-5f4e-4d97-be15-0f5be51acd85\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " Oct 11 05:50:14 crc kubenswrapper[4651]: I1011 05:50:14.762677 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/76b8d472-5f4e-4d97-be15-0f5be51acd85-ssh-key\") pod \"76b8d472-5f4e-4d97-be15-0f5be51acd85\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " Oct 11 05:50:14 crc kubenswrapper[4651]: I1011 05:50:14.762737 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rrx2f\" (UniqueName: \"kubernetes.io/projected/76b8d472-5f4e-4d97-be15-0f5be51acd85-kube-api-access-rrx2f\") pod \"76b8d472-5f4e-4d97-be15-0f5be51acd85\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " Oct 11 05:50:14 crc kubenswrapper[4651]: I1011 05:50:14.762862 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/76b8d472-5f4e-4d97-be15-0f5be51acd85-ca-certs\") pod \"76b8d472-5f4e-4d97-be15-0f5be51acd85\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " Oct 11 05:50:14 crc kubenswrapper[4651]: I1011 05:50:14.763006 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/76b8d472-5f4e-4d97-be15-0f5be51acd85-test-operator-ephemeral-temporary\") pod \"76b8d472-5f4e-4d97-be15-0f5be51acd85\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " Oct 11 05:50:14 crc kubenswrapper[4651]: I1011 05:50:14.763053 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/76b8d472-5f4e-4d97-be15-0f5be51acd85-openstack-config\") pod \"76b8d472-5f4e-4d97-be15-0f5be51acd85\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " Oct 11 05:50:14 crc kubenswrapper[4651]: I1011 05:50:14.763142 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/76b8d472-5f4e-4d97-be15-0f5be51acd85-config-data\") pod \"76b8d472-5f4e-4d97-be15-0f5be51acd85\" (UID: \"76b8d472-5f4e-4d97-be15-0f5be51acd85\") " Oct 11 05:50:14 crc kubenswrapper[4651]: I1011 05:50:14.764203 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76b8d472-5f4e-4d97-be15-0f5be51acd85-test-operator-ephemeral-temporary" (OuterVolumeSpecName: 
"test-operator-ephemeral-temporary") pod "76b8d472-5f4e-4d97-be15-0f5be51acd85" (UID: "76b8d472-5f4e-4d97-be15-0f5be51acd85"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:50:14 crc kubenswrapper[4651]: I1011 05:50:14.765024 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76b8d472-5f4e-4d97-be15-0f5be51acd85-config-data" (OuterVolumeSpecName: "config-data") pod "76b8d472-5f4e-4d97-be15-0f5be51acd85" (UID: "76b8d472-5f4e-4d97-be15-0f5be51acd85"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:50:14 crc kubenswrapper[4651]: I1011 05:50:14.789166 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "test-operator-logs") pod "76b8d472-5f4e-4d97-be15-0f5be51acd85" (UID: "76b8d472-5f4e-4d97-be15-0f5be51acd85"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 11 05:50:14 crc kubenswrapper[4651]: I1011 05:50:14.789228 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76b8d472-5f4e-4d97-be15-0f5be51acd85-kube-api-access-rrx2f" (OuterVolumeSpecName: "kube-api-access-rrx2f") pod "76b8d472-5f4e-4d97-be15-0f5be51acd85" (UID: "76b8d472-5f4e-4d97-be15-0f5be51acd85"). InnerVolumeSpecName "kube-api-access-rrx2f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:50:14 crc kubenswrapper[4651]: I1011 05:50:14.792052 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76b8d472-5f4e-4d97-be15-0f5be51acd85-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "76b8d472-5f4e-4d97-be15-0f5be51acd85" (UID: "76b8d472-5f4e-4d97-be15-0f5be51acd85"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:50:14 crc kubenswrapper[4651]: I1011 05:50:14.803057 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76b8d472-5f4e-4d97-be15-0f5be51acd85-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "76b8d472-5f4e-4d97-be15-0f5be51acd85" (UID: "76b8d472-5f4e-4d97-be15-0f5be51acd85"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:50:14 crc kubenswrapper[4651]: I1011 05:50:14.814956 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76b8d472-5f4e-4d97-be15-0f5be51acd85-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "76b8d472-5f4e-4d97-be15-0f5be51acd85" (UID: "76b8d472-5f4e-4d97-be15-0f5be51acd85"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:50:14 crc kubenswrapper[4651]: I1011 05:50:14.819998 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76b8d472-5f4e-4d97-be15-0f5be51acd85-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "76b8d472-5f4e-4d97-be15-0f5be51acd85" (UID: "76b8d472-5f4e-4d97-be15-0f5be51acd85"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 05:50:14 crc kubenswrapper[4651]: I1011 05:50:14.842720 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76b8d472-5f4e-4d97-be15-0f5be51acd85-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "76b8d472-5f4e-4d97-be15-0f5be51acd85" (UID: "76b8d472-5f4e-4d97-be15-0f5be51acd85"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 05:50:14 crc kubenswrapper[4651]: I1011 05:50:14.866394 4651 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/76b8d472-5f4e-4d97-be15-0f5be51acd85-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Oct 11 05:50:14 crc kubenswrapper[4651]: I1011 05:50:14.866439 4651 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/76b8d472-5f4e-4d97-be15-0f5be51acd85-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 11 05:50:14 crc kubenswrapper[4651]: I1011 05:50:14.866455 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rrx2f\" (UniqueName: \"kubernetes.io/projected/76b8d472-5f4e-4d97-be15-0f5be51acd85-kube-api-access-rrx2f\") on node \"crc\" DevicePath \"\"" Oct 11 05:50:14 crc kubenswrapper[4651]: I1011 05:50:14.866473 4651 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/76b8d472-5f4e-4d97-be15-0f5be51acd85-ca-certs\") on node \"crc\" DevicePath \"\"" Oct 11 05:50:14 crc kubenswrapper[4651]: I1011 05:50:14.866489 4651 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/76b8d472-5f4e-4d97-be15-0f5be51acd85-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Oct 11 05:50:14 crc kubenswrapper[4651]: I1011 05:50:14.866509 4651 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/76b8d472-5f4e-4d97-be15-0f5be51acd85-openstack-config\") on node \"crc\" DevicePath \"\"" Oct 11 05:50:14 crc kubenswrapper[4651]: I1011 05:50:14.866530 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/76b8d472-5f4e-4d97-be15-0f5be51acd85-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 05:50:14 crc kubenswrapper[4651]: I1011 05:50:14.866564 4651 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Oct 11 05:50:14 crc kubenswrapper[4651]: I1011 05:50:14.866579 4651 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/76b8d472-5f4e-4d97-be15-0f5be51acd85-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Oct 11 05:50:14 crc kubenswrapper[4651]: I1011 05:50:14.907726 4651 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Oct 11 05:50:14 crc kubenswrapper[4651]: I1011 05:50:14.968302 4651 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Oct 11 05:50:15 crc kubenswrapper[4651]: I1011 05:50:15.187297 4651 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/tempest-tests-tempest" event={"ID":"76b8d472-5f4e-4d97-be15-0f5be51acd85","Type":"ContainerDied","Data":"3b3887f270fc0f921fadfcae1d12dacbcede15296f2f17225a364dd07503b6c6"} Oct 11 05:50:15 crc kubenswrapper[4651]: I1011 05:50:15.187380 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3b3887f270fc0f921fadfcae1d12dacbcede15296f2f17225a364dd07503b6c6" Oct 11 05:50:15 crc kubenswrapper[4651]: I1011 05:50:15.187458 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 11 05:50:26 crc kubenswrapper[4651]: I1011 05:50:26.844566 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 11 05:50:26 crc kubenswrapper[4651]: E1011 05:50:26.845893 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1958c7ba-76fb-47f8-b154-d114bdead35f" containerName="registry-server" Oct 11 05:50:26 crc kubenswrapper[4651]: I1011 05:50:26.845917 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="1958c7ba-76fb-47f8-b154-d114bdead35f" containerName="registry-server" Oct 11 05:50:26 crc kubenswrapper[4651]: E1011 05:50:26.845941 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76b8d472-5f4e-4d97-be15-0f5be51acd85" containerName="tempest-tests-tempest-tests-runner" Oct 11 05:50:26 crc kubenswrapper[4651]: I1011 05:50:26.845951 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="76b8d472-5f4e-4d97-be15-0f5be51acd85" containerName="tempest-tests-tempest-tests-runner" Oct 11 05:50:26 crc kubenswrapper[4651]: E1011 05:50:26.845963 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1958c7ba-76fb-47f8-b154-d114bdead35f" containerName="extract-utilities" Oct 11 05:50:26 crc kubenswrapper[4651]: I1011 05:50:26.845971 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="1958c7ba-76fb-47f8-b154-d114bdead35f" containerName="extract-utilities" Oct 11 05:50:26 crc kubenswrapper[4651]: E1011 05:50:26.845998 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4544dbff-3a0a-4eb3-94f2-55961043d4d6" containerName="extract-content" Oct 11 05:50:26 crc kubenswrapper[4651]: I1011 05:50:26.846006 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="4544dbff-3a0a-4eb3-94f2-55961043d4d6" containerName="extract-content" Oct 11 05:50:26 crc kubenswrapper[4651]: E1011 05:50:26.846027 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4544dbff-3a0a-4eb3-94f2-55961043d4d6" containerName="registry-server" Oct 11 05:50:26 crc kubenswrapper[4651]: I1011 05:50:26.846037 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="4544dbff-3a0a-4eb3-94f2-55961043d4d6" containerName="registry-server" Oct 11 05:50:26 crc kubenswrapper[4651]: E1011 05:50:26.846058 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4544dbff-3a0a-4eb3-94f2-55961043d4d6" containerName="extract-utilities" Oct 11 05:50:26 crc kubenswrapper[4651]: I1011 05:50:26.846068 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="4544dbff-3a0a-4eb3-94f2-55961043d4d6" containerName="extract-utilities" Oct 11 05:50:26 crc kubenswrapper[4651]: E1011 05:50:26.846080 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1958c7ba-76fb-47f8-b154-d114bdead35f" containerName="extract-content" Oct 11 05:50:26 crc kubenswrapper[4651]: I1011 05:50:26.846088 4651 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="1958c7ba-76fb-47f8-b154-d114bdead35f" containerName="extract-content" Oct 11 05:50:26 crc kubenswrapper[4651]: I1011 05:50:26.846317 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="76b8d472-5f4e-4d97-be15-0f5be51acd85" containerName="tempest-tests-tempest-tests-runner" Oct 11 05:50:26 crc kubenswrapper[4651]: I1011 05:50:26.846341 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="1958c7ba-76fb-47f8-b154-d114bdead35f" containerName="registry-server" Oct 11 05:50:26 crc kubenswrapper[4651]: I1011 05:50:26.846370 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="4544dbff-3a0a-4eb3-94f2-55961043d4d6" containerName="registry-server" Oct 11 05:50:26 crc kubenswrapper[4651]: I1011 05:50:26.847250 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 11 05:50:26 crc kubenswrapper[4651]: I1011 05:50:26.851981 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-r2cjc" Oct 11 05:50:26 crc kubenswrapper[4651]: I1011 05:50:26.860293 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 11 05:50:26 crc kubenswrapper[4651]: I1011 05:50:26.966317 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"87851c60-a63d-43bd-a248-6dad36680eed\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 11 05:50:26 crc kubenswrapper[4651]: I1011 05:50:26.966463 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwzbz\" (UniqueName: \"kubernetes.io/projected/87851c60-a63d-43bd-a248-6dad36680eed-kube-api-access-dwzbz\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"87851c60-a63d-43bd-a248-6dad36680eed\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 11 05:50:27 crc kubenswrapper[4651]: I1011 05:50:27.068847 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwzbz\" (UniqueName: \"kubernetes.io/projected/87851c60-a63d-43bd-a248-6dad36680eed-kube-api-access-dwzbz\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"87851c60-a63d-43bd-a248-6dad36680eed\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 11 05:50:27 crc kubenswrapper[4651]: I1011 05:50:27.069048 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"87851c60-a63d-43bd-a248-6dad36680eed\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 11 05:50:27 crc kubenswrapper[4651]: I1011 05:50:27.069497 4651 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"87851c60-a63d-43bd-a248-6dad36680eed\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 11 05:50:27 crc kubenswrapper[4651]: I1011 05:50:27.105074 
4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwzbz\" (UniqueName: \"kubernetes.io/projected/87851c60-a63d-43bd-a248-6dad36680eed-kube-api-access-dwzbz\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"87851c60-a63d-43bd-a248-6dad36680eed\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 11 05:50:27 crc kubenswrapper[4651]: I1011 05:50:27.109379 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"87851c60-a63d-43bd-a248-6dad36680eed\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 11 05:50:27 crc kubenswrapper[4651]: I1011 05:50:27.174498 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 11 05:50:27 crc kubenswrapper[4651]: I1011 05:50:27.691843 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 11 05:50:27 crc kubenswrapper[4651]: W1011 05:50:27.702103 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod87851c60_a63d_43bd_a248_6dad36680eed.slice/crio-61d1c7cbc28c55c2627caac3a56d6bfb141c827d6f77217850914c27af276ead WatchSource:0}: Error finding container 61d1c7cbc28c55c2627caac3a56d6bfb141c827d6f77217850914c27af276ead: Status 404 returned error can't find the container with id 61d1c7cbc28c55c2627caac3a56d6bfb141c827d6f77217850914c27af276ead Oct 11 05:50:28 crc kubenswrapper[4651]: I1011 05:50:28.348951 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"87851c60-a63d-43bd-a248-6dad36680eed","Type":"ContainerStarted","Data":"61d1c7cbc28c55c2627caac3a56d6bfb141c827d6f77217850914c27af276ead"} Oct 11 05:50:29 crc kubenswrapper[4651]: I1011 05:50:29.361664 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"87851c60-a63d-43bd-a248-6dad36680eed","Type":"ContainerStarted","Data":"b7f49fdeb5f5d77efec2b936087d3fc9a327f69dcc236396027639a1da46653e"} Oct 11 05:50:29 crc kubenswrapper[4651]: I1011 05:50:29.392012 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=2.220563915 podStartE2EDuration="3.391991572s" podCreationTimestamp="2025-10-11 05:50:26 +0000 UTC" firstStartedPulling="2025-10-11 05:50:27.704758971 +0000 UTC m=+3548.600991797" lastFinishedPulling="2025-10-11 05:50:28.876186658 +0000 UTC m=+3549.772419454" observedRunningTime="2025-10-11 05:50:29.381291943 +0000 UTC m=+3550.277524789" watchObservedRunningTime="2025-10-11 05:50:29.391991572 +0000 UTC m=+3550.288224378" Oct 11 05:50:45 crc kubenswrapper[4651]: I1011 05:50:45.069249 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-f85lr"] Oct 11 05:50:45 crc kubenswrapper[4651]: I1011 05:50:45.072726 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-f85lr" Oct 11 05:50:45 crc kubenswrapper[4651]: I1011 05:50:45.085369 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-f85lr"] Oct 11 05:50:45 crc kubenswrapper[4651]: I1011 05:50:45.156296 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zgf6k\" (UniqueName: \"kubernetes.io/projected/c41ed47f-18da-4b3d-a48b-46d373f609eb-kube-api-access-zgf6k\") pod \"certified-operators-f85lr\" (UID: \"c41ed47f-18da-4b3d-a48b-46d373f609eb\") " pod="openshift-marketplace/certified-operators-f85lr" Oct 11 05:50:45 crc kubenswrapper[4651]: I1011 05:50:45.156473 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c41ed47f-18da-4b3d-a48b-46d373f609eb-catalog-content\") pod \"certified-operators-f85lr\" (UID: \"c41ed47f-18da-4b3d-a48b-46d373f609eb\") " pod="openshift-marketplace/certified-operators-f85lr" Oct 11 05:50:45 crc kubenswrapper[4651]: I1011 05:50:45.156566 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c41ed47f-18da-4b3d-a48b-46d373f609eb-utilities\") pod \"certified-operators-f85lr\" (UID: \"c41ed47f-18da-4b3d-a48b-46d373f609eb\") " pod="openshift-marketplace/certified-operators-f85lr" Oct 11 05:50:45 crc kubenswrapper[4651]: I1011 05:50:45.258493 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c41ed47f-18da-4b3d-a48b-46d373f609eb-utilities\") pod \"certified-operators-f85lr\" (UID: \"c41ed47f-18da-4b3d-a48b-46d373f609eb\") " pod="openshift-marketplace/certified-operators-f85lr" Oct 11 05:50:45 crc kubenswrapper[4651]: I1011 05:50:45.258598 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zgf6k\" (UniqueName: \"kubernetes.io/projected/c41ed47f-18da-4b3d-a48b-46d373f609eb-kube-api-access-zgf6k\") pod \"certified-operators-f85lr\" (UID: \"c41ed47f-18da-4b3d-a48b-46d373f609eb\") " pod="openshift-marketplace/certified-operators-f85lr" Oct 11 05:50:45 crc kubenswrapper[4651]: I1011 05:50:45.258696 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c41ed47f-18da-4b3d-a48b-46d373f609eb-catalog-content\") pod \"certified-operators-f85lr\" (UID: \"c41ed47f-18da-4b3d-a48b-46d373f609eb\") " pod="openshift-marketplace/certified-operators-f85lr" Oct 11 05:50:45 crc kubenswrapper[4651]: I1011 05:50:45.259192 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c41ed47f-18da-4b3d-a48b-46d373f609eb-utilities\") pod \"certified-operators-f85lr\" (UID: \"c41ed47f-18da-4b3d-a48b-46d373f609eb\") " pod="openshift-marketplace/certified-operators-f85lr" Oct 11 05:50:45 crc kubenswrapper[4651]: I1011 05:50:45.259225 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c41ed47f-18da-4b3d-a48b-46d373f609eb-catalog-content\") pod \"certified-operators-f85lr\" (UID: \"c41ed47f-18da-4b3d-a48b-46d373f609eb\") " pod="openshift-marketplace/certified-operators-f85lr" Oct 11 05:50:45 crc kubenswrapper[4651]: I1011 05:50:45.288360 4651 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-zgf6k\" (UniqueName: \"kubernetes.io/projected/c41ed47f-18da-4b3d-a48b-46d373f609eb-kube-api-access-zgf6k\") pod \"certified-operators-f85lr\" (UID: \"c41ed47f-18da-4b3d-a48b-46d373f609eb\") " pod="openshift-marketplace/certified-operators-f85lr" Oct 11 05:50:45 crc kubenswrapper[4651]: I1011 05:50:45.416084 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-f85lr" Oct 11 05:50:45 crc kubenswrapper[4651]: I1011 05:50:45.992042 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-f85lr"] Oct 11 05:50:46 crc kubenswrapper[4651]: I1011 05:50:46.287184 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-sxmt5/must-gather-chfjv"] Oct 11 05:50:46 crc kubenswrapper[4651]: I1011 05:50:46.289015 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-sxmt5/must-gather-chfjv" Oct 11 05:50:46 crc kubenswrapper[4651]: I1011 05:50:46.291914 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-sxmt5"/"default-dockercfg-kh298" Oct 11 05:50:46 crc kubenswrapper[4651]: I1011 05:50:46.292130 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-sxmt5"/"openshift-service-ca.crt" Oct 11 05:50:46 crc kubenswrapper[4651]: I1011 05:50:46.292255 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-sxmt5"/"kube-root-ca.crt" Oct 11 05:50:46 crc kubenswrapper[4651]: I1011 05:50:46.296683 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-sxmt5/must-gather-chfjv"] Oct 11 05:50:46 crc kubenswrapper[4651]: I1011 05:50:46.310759 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 05:50:46 crc kubenswrapper[4651]: I1011 05:50:46.310812 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 05:50:46 crc kubenswrapper[4651]: I1011 05:50:46.383354 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/0fcf22ea-ee20-47e0-8840-d6a6a1222773-must-gather-output\") pod \"must-gather-chfjv\" (UID: \"0fcf22ea-ee20-47e0-8840-d6a6a1222773\") " pod="openshift-must-gather-sxmt5/must-gather-chfjv" Oct 11 05:50:46 crc kubenswrapper[4651]: I1011 05:50:46.383409 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4kcf\" (UniqueName: \"kubernetes.io/projected/0fcf22ea-ee20-47e0-8840-d6a6a1222773-kube-api-access-w4kcf\") pod \"must-gather-chfjv\" (UID: \"0fcf22ea-ee20-47e0-8840-d6a6a1222773\") " pod="openshift-must-gather-sxmt5/must-gather-chfjv" Oct 11 05:50:46 crc kubenswrapper[4651]: I1011 05:50:46.484897 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: 
\"kubernetes.io/empty-dir/0fcf22ea-ee20-47e0-8840-d6a6a1222773-must-gather-output\") pod \"must-gather-chfjv\" (UID: \"0fcf22ea-ee20-47e0-8840-d6a6a1222773\") " pod="openshift-must-gather-sxmt5/must-gather-chfjv" Oct 11 05:50:46 crc kubenswrapper[4651]: I1011 05:50:46.485351 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4kcf\" (UniqueName: \"kubernetes.io/projected/0fcf22ea-ee20-47e0-8840-d6a6a1222773-kube-api-access-w4kcf\") pod \"must-gather-chfjv\" (UID: \"0fcf22ea-ee20-47e0-8840-d6a6a1222773\") " pod="openshift-must-gather-sxmt5/must-gather-chfjv" Oct 11 05:50:46 crc kubenswrapper[4651]: I1011 05:50:46.485580 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/0fcf22ea-ee20-47e0-8840-d6a6a1222773-must-gather-output\") pod \"must-gather-chfjv\" (UID: \"0fcf22ea-ee20-47e0-8840-d6a6a1222773\") " pod="openshift-must-gather-sxmt5/must-gather-chfjv" Oct 11 05:50:46 crc kubenswrapper[4651]: I1011 05:50:46.506001 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4kcf\" (UniqueName: \"kubernetes.io/projected/0fcf22ea-ee20-47e0-8840-d6a6a1222773-kube-api-access-w4kcf\") pod \"must-gather-chfjv\" (UID: \"0fcf22ea-ee20-47e0-8840-d6a6a1222773\") " pod="openshift-must-gather-sxmt5/must-gather-chfjv" Oct 11 05:50:46 crc kubenswrapper[4651]: I1011 05:50:46.561810 4651 generic.go:334] "Generic (PLEG): container finished" podID="c41ed47f-18da-4b3d-a48b-46d373f609eb" containerID="a79e3d09475ff64e5e54150253e92caa4c8f0da62f2818b95086c94974a24164" exitCode=0 Oct 11 05:50:46 crc kubenswrapper[4651]: I1011 05:50:46.562088 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-f85lr" event={"ID":"c41ed47f-18da-4b3d-a48b-46d373f609eb","Type":"ContainerDied","Data":"a79e3d09475ff64e5e54150253e92caa4c8f0da62f2818b95086c94974a24164"} Oct 11 05:50:46 crc kubenswrapper[4651]: I1011 05:50:46.562241 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-f85lr" event={"ID":"c41ed47f-18da-4b3d-a48b-46d373f609eb","Type":"ContainerStarted","Data":"70f91fdfc8ff1064f0906df5d51b69aaa96ce946953f91b8dde47129ff95a137"} Oct 11 05:50:46 crc kubenswrapper[4651]: I1011 05:50:46.645906 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-sxmt5/must-gather-chfjv" Oct 11 05:50:47 crc kubenswrapper[4651]: I1011 05:50:47.705733 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-sxmt5/must-gather-chfjv"] Oct 11 05:50:47 crc kubenswrapper[4651]: W1011 05:50:47.711493 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0fcf22ea_ee20_47e0_8840_d6a6a1222773.slice/crio-757561ac4831cdce9797fa524cfc49af33570f17f18c99f930272709d39c884d WatchSource:0}: Error finding container 757561ac4831cdce9797fa524cfc49af33570f17f18c99f930272709d39c884d: Status 404 returned error can't find the container with id 757561ac4831cdce9797fa524cfc49af33570f17f18c99f930272709d39c884d Oct 11 05:50:48 crc kubenswrapper[4651]: I1011 05:50:48.583993 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-sxmt5/must-gather-chfjv" event={"ID":"0fcf22ea-ee20-47e0-8840-d6a6a1222773","Type":"ContainerStarted","Data":"757561ac4831cdce9797fa524cfc49af33570f17f18c99f930272709d39c884d"} Oct 11 05:50:48 crc kubenswrapper[4651]: I1011 05:50:48.586797 4651 generic.go:334] "Generic (PLEG): container finished" podID="c41ed47f-18da-4b3d-a48b-46d373f609eb" containerID="42f3536a1a310503defd1de4de0fd086dc53d9085614ff5a255e0dc1a18b2670" exitCode=0 Oct 11 05:50:48 crc kubenswrapper[4651]: I1011 05:50:48.586862 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-f85lr" event={"ID":"c41ed47f-18da-4b3d-a48b-46d373f609eb","Type":"ContainerDied","Data":"42f3536a1a310503defd1de4de0fd086dc53d9085614ff5a255e0dc1a18b2670"} Oct 11 05:50:49 crc kubenswrapper[4651]: I1011 05:50:49.600821 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-f85lr" event={"ID":"c41ed47f-18da-4b3d-a48b-46d373f609eb","Type":"ContainerStarted","Data":"7be2016fa07307602c26b214d70a7387ff55024a7b2de6a95bbd5dcc94d1f7db"} Oct 11 05:50:49 crc kubenswrapper[4651]: I1011 05:50:49.620816 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-f85lr" podStartSLOduration=2.075588766 podStartE2EDuration="4.620799494s" podCreationTimestamp="2025-10-11 05:50:45 +0000 UTC" firstStartedPulling="2025-10-11 05:50:46.563898325 +0000 UTC m=+3567.460131121" lastFinishedPulling="2025-10-11 05:50:49.109109053 +0000 UTC m=+3570.005341849" observedRunningTime="2025-10-11 05:50:49.617789736 +0000 UTC m=+3570.514022562" watchObservedRunningTime="2025-10-11 05:50:49.620799494 +0000 UTC m=+3570.517032290" Oct 11 05:50:55 crc kubenswrapper[4651]: I1011 05:50:55.416330 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-f85lr" Oct 11 05:50:55 crc kubenswrapper[4651]: I1011 05:50:55.416998 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-f85lr" Oct 11 05:50:55 crc kubenswrapper[4651]: I1011 05:50:55.500506 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-f85lr" Oct 11 05:50:55 crc kubenswrapper[4651]: I1011 05:50:55.677912 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-sxmt5/must-gather-chfjv" event={"ID":"0fcf22ea-ee20-47e0-8840-d6a6a1222773","Type":"ContainerStarted","Data":"afc50b5fe309dbdc7cd726a76123ad20a4b928cd9f5a2fb5d90b035577a082ef"} Oct 11 05:50:55 
crc kubenswrapper[4651]: I1011 05:50:55.677987 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-sxmt5/must-gather-chfjv" event={"ID":"0fcf22ea-ee20-47e0-8840-d6a6a1222773","Type":"ContainerStarted","Data":"dd75cb0888029d805255d7737b3422dc9d853fd599af2fbff6c8e0f07d60dc11"} Oct 11 05:50:55 crc kubenswrapper[4651]: I1011 05:50:55.698324 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-sxmt5/must-gather-chfjv" podStartSLOduration=2.676870641 podStartE2EDuration="9.698304905s" podCreationTimestamp="2025-10-11 05:50:46 +0000 UTC" firstStartedPulling="2025-10-11 05:50:47.714906412 +0000 UTC m=+3568.611139248" lastFinishedPulling="2025-10-11 05:50:54.736340716 +0000 UTC m=+3575.632573512" observedRunningTime="2025-10-11 05:50:55.692918066 +0000 UTC m=+3576.589150872" watchObservedRunningTime="2025-10-11 05:50:55.698304905 +0000 UTC m=+3576.594537701" Oct 11 05:50:55 crc kubenswrapper[4651]: I1011 05:50:55.738715 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-f85lr" Oct 11 05:50:55 crc kubenswrapper[4651]: I1011 05:50:55.803144 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-f85lr"] Oct 11 05:50:57 crc kubenswrapper[4651]: I1011 05:50:57.696201 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-f85lr" podUID="c41ed47f-18da-4b3d-a48b-46d373f609eb" containerName="registry-server" containerID="cri-o://7be2016fa07307602c26b214d70a7387ff55024a7b2de6a95bbd5dcc94d1f7db" gracePeriod=2 Oct 11 05:50:58 crc kubenswrapper[4651]: I1011 05:50:58.254520 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-f85lr" Oct 11 05:50:58 crc kubenswrapper[4651]: I1011 05:50:58.399608 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgf6k\" (UniqueName: \"kubernetes.io/projected/c41ed47f-18da-4b3d-a48b-46d373f609eb-kube-api-access-zgf6k\") pod \"c41ed47f-18da-4b3d-a48b-46d373f609eb\" (UID: \"c41ed47f-18da-4b3d-a48b-46d373f609eb\") " Oct 11 05:50:58 crc kubenswrapper[4651]: I1011 05:50:58.399755 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c41ed47f-18da-4b3d-a48b-46d373f609eb-utilities\") pod \"c41ed47f-18da-4b3d-a48b-46d373f609eb\" (UID: \"c41ed47f-18da-4b3d-a48b-46d373f609eb\") " Oct 11 05:50:58 crc kubenswrapper[4651]: I1011 05:50:58.399859 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c41ed47f-18da-4b3d-a48b-46d373f609eb-catalog-content\") pod \"c41ed47f-18da-4b3d-a48b-46d373f609eb\" (UID: \"c41ed47f-18da-4b3d-a48b-46d373f609eb\") " Oct 11 05:50:58 crc kubenswrapper[4651]: I1011 05:50:58.400695 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c41ed47f-18da-4b3d-a48b-46d373f609eb-utilities" (OuterVolumeSpecName: "utilities") pod "c41ed47f-18da-4b3d-a48b-46d373f609eb" (UID: "c41ed47f-18da-4b3d-a48b-46d373f609eb"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:50:58 crc kubenswrapper[4651]: I1011 05:50:58.407101 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c41ed47f-18da-4b3d-a48b-46d373f609eb-kube-api-access-zgf6k" (OuterVolumeSpecName: "kube-api-access-zgf6k") pod "c41ed47f-18da-4b3d-a48b-46d373f609eb" (UID: "c41ed47f-18da-4b3d-a48b-46d373f609eb"). InnerVolumeSpecName "kube-api-access-zgf6k". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:50:58 crc kubenswrapper[4651]: I1011 05:50:58.442617 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c41ed47f-18da-4b3d-a48b-46d373f609eb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c41ed47f-18da-4b3d-a48b-46d373f609eb" (UID: "c41ed47f-18da-4b3d-a48b-46d373f609eb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:50:58 crc kubenswrapper[4651]: I1011 05:50:58.501837 4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c41ed47f-18da-4b3d-a48b-46d373f609eb-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 05:50:58 crc kubenswrapper[4651]: I1011 05:50:58.502020 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgf6k\" (UniqueName: \"kubernetes.io/projected/c41ed47f-18da-4b3d-a48b-46d373f609eb-kube-api-access-zgf6k\") on node \"crc\" DevicePath \"\"" Oct 11 05:50:58 crc kubenswrapper[4651]: I1011 05:50:58.502075 4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c41ed47f-18da-4b3d-a48b-46d373f609eb-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 05:50:58 crc kubenswrapper[4651]: I1011 05:50:58.708622 4651 generic.go:334] "Generic (PLEG): container finished" podID="c41ed47f-18da-4b3d-a48b-46d373f609eb" containerID="7be2016fa07307602c26b214d70a7387ff55024a7b2de6a95bbd5dcc94d1f7db" exitCode=0 Oct 11 05:50:58 crc kubenswrapper[4651]: I1011 05:50:58.710258 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-f85lr" event={"ID":"c41ed47f-18da-4b3d-a48b-46d373f609eb","Type":"ContainerDied","Data":"7be2016fa07307602c26b214d70a7387ff55024a7b2de6a95bbd5dcc94d1f7db"} Oct 11 05:50:58 crc kubenswrapper[4651]: I1011 05:50:58.710367 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-f85lr" event={"ID":"c41ed47f-18da-4b3d-a48b-46d373f609eb","Type":"ContainerDied","Data":"70f91fdfc8ff1064f0906df5d51b69aaa96ce946953f91b8dde47129ff95a137"} Oct 11 05:50:58 crc kubenswrapper[4651]: I1011 05:50:58.710511 4651 scope.go:117] "RemoveContainer" containerID="7be2016fa07307602c26b214d70a7387ff55024a7b2de6a95bbd5dcc94d1f7db" Oct 11 05:50:58 crc kubenswrapper[4651]: I1011 05:50:58.710780 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-f85lr" Oct 11 05:50:58 crc kubenswrapper[4651]: I1011 05:50:58.750496 4651 scope.go:117] "RemoveContainer" containerID="42f3536a1a310503defd1de4de0fd086dc53d9085614ff5a255e0dc1a18b2670" Oct 11 05:50:58 crc kubenswrapper[4651]: I1011 05:50:58.761905 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-f85lr"] Oct 11 05:50:58 crc kubenswrapper[4651]: I1011 05:50:58.769423 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-f85lr"] Oct 11 05:50:58 crc kubenswrapper[4651]: I1011 05:50:58.777029 4651 scope.go:117] "RemoveContainer" containerID="a79e3d09475ff64e5e54150253e92caa4c8f0da62f2818b95086c94974a24164" Oct 11 05:50:58 crc kubenswrapper[4651]: I1011 05:50:58.818680 4651 scope.go:117] "RemoveContainer" containerID="7be2016fa07307602c26b214d70a7387ff55024a7b2de6a95bbd5dcc94d1f7db" Oct 11 05:50:58 crc kubenswrapper[4651]: E1011 05:50:58.819387 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7be2016fa07307602c26b214d70a7387ff55024a7b2de6a95bbd5dcc94d1f7db\": container with ID starting with 7be2016fa07307602c26b214d70a7387ff55024a7b2de6a95bbd5dcc94d1f7db not found: ID does not exist" containerID="7be2016fa07307602c26b214d70a7387ff55024a7b2de6a95bbd5dcc94d1f7db" Oct 11 05:50:58 crc kubenswrapper[4651]: I1011 05:50:58.819514 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7be2016fa07307602c26b214d70a7387ff55024a7b2de6a95bbd5dcc94d1f7db"} err="failed to get container status \"7be2016fa07307602c26b214d70a7387ff55024a7b2de6a95bbd5dcc94d1f7db\": rpc error: code = NotFound desc = could not find container \"7be2016fa07307602c26b214d70a7387ff55024a7b2de6a95bbd5dcc94d1f7db\": container with ID starting with 7be2016fa07307602c26b214d70a7387ff55024a7b2de6a95bbd5dcc94d1f7db not found: ID does not exist" Oct 11 05:50:58 crc kubenswrapper[4651]: I1011 05:50:58.819626 4651 scope.go:117] "RemoveContainer" containerID="42f3536a1a310503defd1de4de0fd086dc53d9085614ff5a255e0dc1a18b2670" Oct 11 05:50:58 crc kubenswrapper[4651]: E1011 05:50:58.820223 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"42f3536a1a310503defd1de4de0fd086dc53d9085614ff5a255e0dc1a18b2670\": container with ID starting with 42f3536a1a310503defd1de4de0fd086dc53d9085614ff5a255e0dc1a18b2670 not found: ID does not exist" containerID="42f3536a1a310503defd1de4de0fd086dc53d9085614ff5a255e0dc1a18b2670" Oct 11 05:50:58 crc kubenswrapper[4651]: I1011 05:50:58.820286 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42f3536a1a310503defd1de4de0fd086dc53d9085614ff5a255e0dc1a18b2670"} err="failed to get container status \"42f3536a1a310503defd1de4de0fd086dc53d9085614ff5a255e0dc1a18b2670\": rpc error: code = NotFound desc = could not find container \"42f3536a1a310503defd1de4de0fd086dc53d9085614ff5a255e0dc1a18b2670\": container with ID starting with 42f3536a1a310503defd1de4de0fd086dc53d9085614ff5a255e0dc1a18b2670 not found: ID does not exist" Oct 11 05:50:58 crc kubenswrapper[4651]: I1011 05:50:58.820335 4651 scope.go:117] "RemoveContainer" containerID="a79e3d09475ff64e5e54150253e92caa4c8f0da62f2818b95086c94974a24164" Oct 11 05:50:58 crc kubenswrapper[4651]: E1011 05:50:58.820714 4651 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"a79e3d09475ff64e5e54150253e92caa4c8f0da62f2818b95086c94974a24164\": container with ID starting with a79e3d09475ff64e5e54150253e92caa4c8f0da62f2818b95086c94974a24164 not found: ID does not exist" containerID="a79e3d09475ff64e5e54150253e92caa4c8f0da62f2818b95086c94974a24164" Oct 11 05:50:58 crc kubenswrapper[4651]: I1011 05:50:58.820815 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a79e3d09475ff64e5e54150253e92caa4c8f0da62f2818b95086c94974a24164"} err="failed to get container status \"a79e3d09475ff64e5e54150253e92caa4c8f0da62f2818b95086c94974a24164\": rpc error: code = NotFound desc = could not find container \"a79e3d09475ff64e5e54150253e92caa4c8f0da62f2818b95086c94974a24164\": container with ID starting with a79e3d09475ff64e5e54150253e92caa4c8f0da62f2818b95086c94974a24164 not found: ID does not exist" Oct 11 05:50:58 crc kubenswrapper[4651]: I1011 05:50:58.940255 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-sxmt5/crc-debug-24w2n"] Oct 11 05:50:58 crc kubenswrapper[4651]: E1011 05:50:58.941309 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c41ed47f-18da-4b3d-a48b-46d373f609eb" containerName="registry-server" Oct 11 05:50:58 crc kubenswrapper[4651]: I1011 05:50:58.941380 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="c41ed47f-18da-4b3d-a48b-46d373f609eb" containerName="registry-server" Oct 11 05:50:58 crc kubenswrapper[4651]: E1011 05:50:58.941506 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c41ed47f-18da-4b3d-a48b-46d373f609eb" containerName="extract-content" Oct 11 05:50:58 crc kubenswrapper[4651]: I1011 05:50:58.941553 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="c41ed47f-18da-4b3d-a48b-46d373f609eb" containerName="extract-content" Oct 11 05:50:58 crc kubenswrapper[4651]: E1011 05:50:58.941607 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c41ed47f-18da-4b3d-a48b-46d373f609eb" containerName="extract-utilities" Oct 11 05:50:58 crc kubenswrapper[4651]: I1011 05:50:58.941718 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="c41ed47f-18da-4b3d-a48b-46d373f609eb" containerName="extract-utilities" Oct 11 05:50:58 crc kubenswrapper[4651]: I1011 05:50:58.942008 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="c41ed47f-18da-4b3d-a48b-46d373f609eb" containerName="registry-server" Oct 11 05:50:58 crc kubenswrapper[4651]: I1011 05:50:58.942868 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-sxmt5/crc-debug-24w2n" Oct 11 05:50:59 crc kubenswrapper[4651]: I1011 05:50:59.117540 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9dfcef72-0770-4c7f-94bb-c22a97dfab70-host\") pod \"crc-debug-24w2n\" (UID: \"9dfcef72-0770-4c7f-94bb-c22a97dfab70\") " pod="openshift-must-gather-sxmt5/crc-debug-24w2n" Oct 11 05:50:59 crc kubenswrapper[4651]: I1011 05:50:59.117665 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-48m2c\" (UniqueName: \"kubernetes.io/projected/9dfcef72-0770-4c7f-94bb-c22a97dfab70-kube-api-access-48m2c\") pod \"crc-debug-24w2n\" (UID: \"9dfcef72-0770-4c7f-94bb-c22a97dfab70\") " pod="openshift-must-gather-sxmt5/crc-debug-24w2n" Oct 11 05:50:59 crc kubenswrapper[4651]: I1011 05:50:59.220690 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9dfcef72-0770-4c7f-94bb-c22a97dfab70-host\") pod \"crc-debug-24w2n\" (UID: \"9dfcef72-0770-4c7f-94bb-c22a97dfab70\") " pod="openshift-must-gather-sxmt5/crc-debug-24w2n" Oct 11 05:50:59 crc kubenswrapper[4651]: I1011 05:50:59.220852 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-48m2c\" (UniqueName: \"kubernetes.io/projected/9dfcef72-0770-4c7f-94bb-c22a97dfab70-kube-api-access-48m2c\") pod \"crc-debug-24w2n\" (UID: \"9dfcef72-0770-4c7f-94bb-c22a97dfab70\") " pod="openshift-must-gather-sxmt5/crc-debug-24w2n" Oct 11 05:50:59 crc kubenswrapper[4651]: I1011 05:50:59.220914 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9dfcef72-0770-4c7f-94bb-c22a97dfab70-host\") pod \"crc-debug-24w2n\" (UID: \"9dfcef72-0770-4c7f-94bb-c22a97dfab70\") " pod="openshift-must-gather-sxmt5/crc-debug-24w2n" Oct 11 05:50:59 crc kubenswrapper[4651]: I1011 05:50:59.242595 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-48m2c\" (UniqueName: \"kubernetes.io/projected/9dfcef72-0770-4c7f-94bb-c22a97dfab70-kube-api-access-48m2c\") pod \"crc-debug-24w2n\" (UID: \"9dfcef72-0770-4c7f-94bb-c22a97dfab70\") " pod="openshift-must-gather-sxmt5/crc-debug-24w2n" Oct 11 05:50:59 crc kubenswrapper[4651]: I1011 05:50:59.259046 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-sxmt5/crc-debug-24w2n" Oct 11 05:50:59 crc kubenswrapper[4651]: I1011 05:50:59.720325 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-sxmt5/crc-debug-24w2n" event={"ID":"9dfcef72-0770-4c7f-94bb-c22a97dfab70","Type":"ContainerStarted","Data":"cb781ddcaff21c9c7d952b35ba99e9653de7b7b1e3c9826ff67fe0cff4fd466a"} Oct 11 05:50:59 crc kubenswrapper[4651]: I1011 05:50:59.888020 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c41ed47f-18da-4b3d-a48b-46d373f609eb" path="/var/lib/kubelet/pods/c41ed47f-18da-4b3d-a48b-46d373f609eb/volumes" Oct 11 05:51:11 crc kubenswrapper[4651]: I1011 05:51:11.850109 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-sxmt5/crc-debug-24w2n" event={"ID":"9dfcef72-0770-4c7f-94bb-c22a97dfab70","Type":"ContainerStarted","Data":"105eb2b686960045cfc713c6465911ef84fad014b4f4a351a933c0316ac4b39d"} Oct 11 05:51:11 crc kubenswrapper[4651]: I1011 05:51:11.888164 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-sxmt5/crc-debug-24w2n" podStartSLOduration=2.21015758 podStartE2EDuration="13.888146004s" podCreationTimestamp="2025-10-11 05:50:58 +0000 UTC" firstStartedPulling="2025-10-11 05:50:59.302993266 +0000 UTC m=+3580.199226062" lastFinishedPulling="2025-10-11 05:51:10.98098169 +0000 UTC m=+3591.877214486" observedRunningTime="2025-10-11 05:51:11.885009553 +0000 UTC m=+3592.781242359" watchObservedRunningTime="2025-10-11 05:51:11.888146004 +0000 UTC m=+3592.784378800" Oct 11 05:51:16 crc kubenswrapper[4651]: I1011 05:51:16.310282 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 05:51:16 crc kubenswrapper[4651]: I1011 05:51:16.310842 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 05:51:46 crc kubenswrapper[4651]: I1011 05:51:46.310316 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 05:51:46 crc kubenswrapper[4651]: I1011 05:51:46.310904 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 05:51:46 crc kubenswrapper[4651]: I1011 05:51:46.310960 4651 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" Oct 11 05:51:46 crc kubenswrapper[4651]: I1011 05:51:46.311730 4651 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"c912b3bc11354e42d7455ad2000403f0825b71426687440ea00315847957d4d3"} pod="openshift-machine-config-operator/machine-config-daemon-78jnv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 11 05:51:46 crc kubenswrapper[4651]: I1011 05:51:46.311786 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" containerID="cri-o://c912b3bc11354e42d7455ad2000403f0825b71426687440ea00315847957d4d3" gracePeriod=600 Oct 11 05:51:46 crc kubenswrapper[4651]: E1011 05:51:46.461605 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:51:47 crc kubenswrapper[4651]: I1011 05:51:47.193788 4651 generic.go:334] "Generic (PLEG): container finished" podID="519a1ae1-e964-48b0-8b61-835146df28c1" containerID="c912b3bc11354e42d7455ad2000403f0825b71426687440ea00315847957d4d3" exitCode=0 Oct 11 05:51:47 crc kubenswrapper[4651]: I1011 05:51:47.193847 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" event={"ID":"519a1ae1-e964-48b0-8b61-835146df28c1","Type":"ContainerDied","Data":"c912b3bc11354e42d7455ad2000403f0825b71426687440ea00315847957d4d3"} Oct 11 05:51:47 crc kubenswrapper[4651]: I1011 05:51:47.193898 4651 scope.go:117] "RemoveContainer" containerID="1098930e9e839a15ff635912f8f330c5224821ed8362696a83b86f2f3eaded43" Oct 11 05:51:47 crc kubenswrapper[4651]: I1011 05:51:47.194629 4651 scope.go:117] "RemoveContainer" containerID="c912b3bc11354e42d7455ad2000403f0825b71426687440ea00315847957d4d3" Oct 11 05:51:47 crc kubenswrapper[4651]: E1011 05:51:47.195045 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:51:48 crc kubenswrapper[4651]: I1011 05:51:48.204152 4651 generic.go:334] "Generic (PLEG): container finished" podID="9dfcef72-0770-4c7f-94bb-c22a97dfab70" containerID="105eb2b686960045cfc713c6465911ef84fad014b4f4a351a933c0316ac4b39d" exitCode=0 Oct 11 05:51:48 crc kubenswrapper[4651]: I1011 05:51:48.204497 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-sxmt5/crc-debug-24w2n" event={"ID":"9dfcef72-0770-4c7f-94bb-c22a97dfab70","Type":"ContainerDied","Data":"105eb2b686960045cfc713c6465911ef84fad014b4f4a351a933c0316ac4b39d"} Oct 11 05:51:49 crc kubenswrapper[4651]: I1011 05:51:49.300535 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-sxmt5/crc-debug-24w2n" Oct 11 05:51:49 crc kubenswrapper[4651]: I1011 05:51:49.349959 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-sxmt5/crc-debug-24w2n"] Oct 11 05:51:49 crc kubenswrapper[4651]: I1011 05:51:49.358894 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-sxmt5/crc-debug-24w2n"] Oct 11 05:51:49 crc kubenswrapper[4651]: I1011 05:51:49.429555 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9dfcef72-0770-4c7f-94bb-c22a97dfab70-host\") pod \"9dfcef72-0770-4c7f-94bb-c22a97dfab70\" (UID: \"9dfcef72-0770-4c7f-94bb-c22a97dfab70\") " Oct 11 05:51:49 crc kubenswrapper[4651]: I1011 05:51:49.429683 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9dfcef72-0770-4c7f-94bb-c22a97dfab70-host" (OuterVolumeSpecName: "host") pod "9dfcef72-0770-4c7f-94bb-c22a97dfab70" (UID: "9dfcef72-0770-4c7f-94bb-c22a97dfab70"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 05:51:49 crc kubenswrapper[4651]: I1011 05:51:49.430067 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-48m2c\" (UniqueName: \"kubernetes.io/projected/9dfcef72-0770-4c7f-94bb-c22a97dfab70-kube-api-access-48m2c\") pod \"9dfcef72-0770-4c7f-94bb-c22a97dfab70\" (UID: \"9dfcef72-0770-4c7f-94bb-c22a97dfab70\") " Oct 11 05:51:49 crc kubenswrapper[4651]: I1011 05:51:49.430601 4651 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9dfcef72-0770-4c7f-94bb-c22a97dfab70-host\") on node \"crc\" DevicePath \"\"" Oct 11 05:51:49 crc kubenswrapper[4651]: I1011 05:51:49.436156 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9dfcef72-0770-4c7f-94bb-c22a97dfab70-kube-api-access-48m2c" (OuterVolumeSpecName: "kube-api-access-48m2c") pod "9dfcef72-0770-4c7f-94bb-c22a97dfab70" (UID: "9dfcef72-0770-4c7f-94bb-c22a97dfab70"). InnerVolumeSpecName "kube-api-access-48m2c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:51:49 crc kubenswrapper[4651]: I1011 05:51:49.532520 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-48m2c\" (UniqueName: \"kubernetes.io/projected/9dfcef72-0770-4c7f-94bb-c22a97dfab70-kube-api-access-48m2c\") on node \"crc\" DevicePath \"\"" Oct 11 05:51:49 crc kubenswrapper[4651]: I1011 05:51:49.886527 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9dfcef72-0770-4c7f-94bb-c22a97dfab70" path="/var/lib/kubelet/pods/9dfcef72-0770-4c7f-94bb-c22a97dfab70/volumes" Oct 11 05:51:50 crc kubenswrapper[4651]: I1011 05:51:50.223977 4651 scope.go:117] "RemoveContainer" containerID="105eb2b686960045cfc713c6465911ef84fad014b4f4a351a933c0316ac4b39d" Oct 11 05:51:50 crc kubenswrapper[4651]: I1011 05:51:50.224228 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-sxmt5/crc-debug-24w2n" Oct 11 05:51:50 crc kubenswrapper[4651]: I1011 05:51:50.565880 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-sxmt5/crc-debug-wxxkp"] Oct 11 05:51:50 crc kubenswrapper[4651]: E1011 05:51:50.566592 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9dfcef72-0770-4c7f-94bb-c22a97dfab70" containerName="container-00" Oct 11 05:51:50 crc kubenswrapper[4651]: I1011 05:51:50.566607 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="9dfcef72-0770-4c7f-94bb-c22a97dfab70" containerName="container-00" Oct 11 05:51:50 crc kubenswrapper[4651]: I1011 05:51:50.566949 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="9dfcef72-0770-4c7f-94bb-c22a97dfab70" containerName="container-00" Oct 11 05:51:50 crc kubenswrapper[4651]: I1011 05:51:50.567670 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-sxmt5/crc-debug-wxxkp" Oct 11 05:51:50 crc kubenswrapper[4651]: I1011 05:51:50.655302 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/008168ba-c450-4b20-81a3-7627f0e8c730-host\") pod \"crc-debug-wxxkp\" (UID: \"008168ba-c450-4b20-81a3-7627f0e8c730\") " pod="openshift-must-gather-sxmt5/crc-debug-wxxkp" Oct 11 05:51:50 crc kubenswrapper[4651]: I1011 05:51:50.655447 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krskc\" (UniqueName: \"kubernetes.io/projected/008168ba-c450-4b20-81a3-7627f0e8c730-kube-api-access-krskc\") pod \"crc-debug-wxxkp\" (UID: \"008168ba-c450-4b20-81a3-7627f0e8c730\") " pod="openshift-must-gather-sxmt5/crc-debug-wxxkp" Oct 11 05:51:50 crc kubenswrapper[4651]: I1011 05:51:50.757498 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/008168ba-c450-4b20-81a3-7627f0e8c730-host\") pod \"crc-debug-wxxkp\" (UID: \"008168ba-c450-4b20-81a3-7627f0e8c730\") " pod="openshift-must-gather-sxmt5/crc-debug-wxxkp" Oct 11 05:51:50 crc kubenswrapper[4651]: I1011 05:51:50.757607 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-krskc\" (UniqueName: \"kubernetes.io/projected/008168ba-c450-4b20-81a3-7627f0e8c730-kube-api-access-krskc\") pod \"crc-debug-wxxkp\" (UID: \"008168ba-c450-4b20-81a3-7627f0e8c730\") " pod="openshift-must-gather-sxmt5/crc-debug-wxxkp" Oct 11 05:51:50 crc kubenswrapper[4651]: I1011 05:51:50.757771 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/008168ba-c450-4b20-81a3-7627f0e8c730-host\") pod \"crc-debug-wxxkp\" (UID: \"008168ba-c450-4b20-81a3-7627f0e8c730\") " pod="openshift-must-gather-sxmt5/crc-debug-wxxkp" Oct 11 05:51:50 crc kubenswrapper[4651]: I1011 05:51:50.786618 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-krskc\" (UniqueName: \"kubernetes.io/projected/008168ba-c450-4b20-81a3-7627f0e8c730-kube-api-access-krskc\") pod \"crc-debug-wxxkp\" (UID: \"008168ba-c450-4b20-81a3-7627f0e8c730\") " pod="openshift-must-gather-sxmt5/crc-debug-wxxkp" Oct 11 05:51:50 crc kubenswrapper[4651]: I1011 05:51:50.884196 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-sxmt5/crc-debug-wxxkp" Oct 11 05:51:51 crc kubenswrapper[4651]: I1011 05:51:51.235403 4651 generic.go:334] "Generic (PLEG): container finished" podID="008168ba-c450-4b20-81a3-7627f0e8c730" containerID="e4e277323551963620a61c5c1de410da4749941f8af6c5b499157b4b858a642c" exitCode=0 Oct 11 05:51:51 crc kubenswrapper[4651]: I1011 05:51:51.235463 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-sxmt5/crc-debug-wxxkp" event={"ID":"008168ba-c450-4b20-81a3-7627f0e8c730","Type":"ContainerDied","Data":"e4e277323551963620a61c5c1de410da4749941f8af6c5b499157b4b858a642c"} Oct 11 05:51:51 crc kubenswrapper[4651]: I1011 05:51:51.235492 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-sxmt5/crc-debug-wxxkp" event={"ID":"008168ba-c450-4b20-81a3-7627f0e8c730","Type":"ContainerStarted","Data":"8a00ff5aaecac522d1ecf1eab52db8f92f4e2ba4b8a9a2b2264020f49d0ad8d1"} Oct 11 05:51:51 crc kubenswrapper[4651]: I1011 05:51:51.635610 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-sxmt5/crc-debug-wxxkp"] Oct 11 05:51:51 crc kubenswrapper[4651]: I1011 05:51:51.646835 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-sxmt5/crc-debug-wxxkp"] Oct 11 05:51:52 crc kubenswrapper[4651]: I1011 05:51:52.329750 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-sxmt5/crc-debug-wxxkp" Oct 11 05:51:52 crc kubenswrapper[4651]: I1011 05:51:52.486210 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-krskc\" (UniqueName: \"kubernetes.io/projected/008168ba-c450-4b20-81a3-7627f0e8c730-kube-api-access-krskc\") pod \"008168ba-c450-4b20-81a3-7627f0e8c730\" (UID: \"008168ba-c450-4b20-81a3-7627f0e8c730\") " Oct 11 05:51:52 crc kubenswrapper[4651]: I1011 05:51:52.487185 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/008168ba-c450-4b20-81a3-7627f0e8c730-host\") pod \"008168ba-c450-4b20-81a3-7627f0e8c730\" (UID: \"008168ba-c450-4b20-81a3-7627f0e8c730\") " Oct 11 05:51:52 crc kubenswrapper[4651]: I1011 05:51:52.487313 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/008168ba-c450-4b20-81a3-7627f0e8c730-host" (OuterVolumeSpecName: "host") pod "008168ba-c450-4b20-81a3-7627f0e8c730" (UID: "008168ba-c450-4b20-81a3-7627f0e8c730"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 05:51:52 crc kubenswrapper[4651]: I1011 05:51:52.487782 4651 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/008168ba-c450-4b20-81a3-7627f0e8c730-host\") on node \"crc\" DevicePath \"\"" Oct 11 05:51:52 crc kubenswrapper[4651]: I1011 05:51:52.504487 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/008168ba-c450-4b20-81a3-7627f0e8c730-kube-api-access-krskc" (OuterVolumeSpecName: "kube-api-access-krskc") pod "008168ba-c450-4b20-81a3-7627f0e8c730" (UID: "008168ba-c450-4b20-81a3-7627f0e8c730"). InnerVolumeSpecName "kube-api-access-krskc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:51:52 crc kubenswrapper[4651]: I1011 05:51:52.589712 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-krskc\" (UniqueName: \"kubernetes.io/projected/008168ba-c450-4b20-81a3-7627f0e8c730-kube-api-access-krskc\") on node \"crc\" DevicePath \"\"" Oct 11 05:51:52 crc kubenswrapper[4651]: I1011 05:51:52.821953 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-sxmt5/crc-debug-vbrvf"] Oct 11 05:51:52 crc kubenswrapper[4651]: E1011 05:51:52.822331 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="008168ba-c450-4b20-81a3-7627f0e8c730" containerName="container-00" Oct 11 05:51:52 crc kubenswrapper[4651]: I1011 05:51:52.822343 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="008168ba-c450-4b20-81a3-7627f0e8c730" containerName="container-00" Oct 11 05:51:52 crc kubenswrapper[4651]: I1011 05:51:52.822543 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="008168ba-c450-4b20-81a3-7627f0e8c730" containerName="container-00" Oct 11 05:51:52 crc kubenswrapper[4651]: I1011 05:51:52.823177 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-sxmt5/crc-debug-vbrvf" Oct 11 05:51:52 crc kubenswrapper[4651]: I1011 05:51:52.996956 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/23f28431-7e50-45ef-b823-61cbbb58dddf-host\") pod \"crc-debug-vbrvf\" (UID: \"23f28431-7e50-45ef-b823-61cbbb58dddf\") " pod="openshift-must-gather-sxmt5/crc-debug-vbrvf" Oct 11 05:51:52 crc kubenswrapper[4651]: I1011 05:51:52.997230 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79kqz\" (UniqueName: \"kubernetes.io/projected/23f28431-7e50-45ef-b823-61cbbb58dddf-kube-api-access-79kqz\") pod \"crc-debug-vbrvf\" (UID: \"23f28431-7e50-45ef-b823-61cbbb58dddf\") " pod="openshift-must-gather-sxmt5/crc-debug-vbrvf" Oct 11 05:51:53 crc kubenswrapper[4651]: I1011 05:51:53.099181 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79kqz\" (UniqueName: \"kubernetes.io/projected/23f28431-7e50-45ef-b823-61cbbb58dddf-kube-api-access-79kqz\") pod \"crc-debug-vbrvf\" (UID: \"23f28431-7e50-45ef-b823-61cbbb58dddf\") " pod="openshift-must-gather-sxmt5/crc-debug-vbrvf" Oct 11 05:51:53 crc kubenswrapper[4651]: I1011 05:51:53.099277 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/23f28431-7e50-45ef-b823-61cbbb58dddf-host\") pod \"crc-debug-vbrvf\" (UID: \"23f28431-7e50-45ef-b823-61cbbb58dddf\") " pod="openshift-must-gather-sxmt5/crc-debug-vbrvf" Oct 11 05:51:53 crc kubenswrapper[4651]: I1011 05:51:53.099457 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/23f28431-7e50-45ef-b823-61cbbb58dddf-host\") pod \"crc-debug-vbrvf\" (UID: \"23f28431-7e50-45ef-b823-61cbbb58dddf\") " pod="openshift-must-gather-sxmt5/crc-debug-vbrvf" Oct 11 05:51:53 crc kubenswrapper[4651]: I1011 05:51:53.125267 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79kqz\" (UniqueName: \"kubernetes.io/projected/23f28431-7e50-45ef-b823-61cbbb58dddf-kube-api-access-79kqz\") pod \"crc-debug-vbrvf\" (UID: \"23f28431-7e50-45ef-b823-61cbbb58dddf\") " 
pod="openshift-must-gather-sxmt5/crc-debug-vbrvf" Oct 11 05:51:53 crc kubenswrapper[4651]: I1011 05:51:53.138044 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-sxmt5/crc-debug-vbrvf" Oct 11 05:51:53 crc kubenswrapper[4651]: I1011 05:51:53.258149 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-sxmt5/crc-debug-vbrvf" event={"ID":"23f28431-7e50-45ef-b823-61cbbb58dddf","Type":"ContainerStarted","Data":"9fb3ee033f8451e7e31977b1b6b76de15d85db4ef91548a0d87c94ae0d144874"} Oct 11 05:51:53 crc kubenswrapper[4651]: I1011 05:51:53.260071 4651 scope.go:117] "RemoveContainer" containerID="e4e277323551963620a61c5c1de410da4749941f8af6c5b499157b4b858a642c" Oct 11 05:51:53 crc kubenswrapper[4651]: I1011 05:51:53.260159 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-sxmt5/crc-debug-wxxkp" Oct 11 05:51:53 crc kubenswrapper[4651]: I1011 05:51:53.890801 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="008168ba-c450-4b20-81a3-7627f0e8c730" path="/var/lib/kubelet/pods/008168ba-c450-4b20-81a3-7627f0e8c730/volumes" Oct 11 05:51:54 crc kubenswrapper[4651]: I1011 05:51:54.270610 4651 generic.go:334] "Generic (PLEG): container finished" podID="23f28431-7e50-45ef-b823-61cbbb58dddf" containerID="32618ae0da78d791b58ea9189012db2b641154486fe1f05edb5c5dd1912a4db2" exitCode=0 Oct 11 05:51:54 crc kubenswrapper[4651]: I1011 05:51:54.270680 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-sxmt5/crc-debug-vbrvf" event={"ID":"23f28431-7e50-45ef-b823-61cbbb58dddf","Type":"ContainerDied","Data":"32618ae0da78d791b58ea9189012db2b641154486fe1f05edb5c5dd1912a4db2"} Oct 11 05:51:54 crc kubenswrapper[4651]: I1011 05:51:54.307043 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-sxmt5/crc-debug-vbrvf"] Oct 11 05:51:54 crc kubenswrapper[4651]: I1011 05:51:54.319801 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-sxmt5/crc-debug-vbrvf"] Oct 11 05:51:55 crc kubenswrapper[4651]: I1011 05:51:55.383696 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-sxmt5/crc-debug-vbrvf" Oct 11 05:51:55 crc kubenswrapper[4651]: I1011 05:51:55.546386 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/23f28431-7e50-45ef-b823-61cbbb58dddf-host\") pod \"23f28431-7e50-45ef-b823-61cbbb58dddf\" (UID: \"23f28431-7e50-45ef-b823-61cbbb58dddf\") " Oct 11 05:51:55 crc kubenswrapper[4651]: I1011 05:51:55.546450 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-79kqz\" (UniqueName: \"kubernetes.io/projected/23f28431-7e50-45ef-b823-61cbbb58dddf-kube-api-access-79kqz\") pod \"23f28431-7e50-45ef-b823-61cbbb58dddf\" (UID: \"23f28431-7e50-45ef-b823-61cbbb58dddf\") " Oct 11 05:51:55 crc kubenswrapper[4651]: I1011 05:51:55.546455 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/23f28431-7e50-45ef-b823-61cbbb58dddf-host" (OuterVolumeSpecName: "host") pod "23f28431-7e50-45ef-b823-61cbbb58dddf" (UID: "23f28431-7e50-45ef-b823-61cbbb58dddf"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 05:51:55 crc kubenswrapper[4651]: I1011 05:51:55.547099 4651 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/23f28431-7e50-45ef-b823-61cbbb58dddf-host\") on node \"crc\" DevicePath \"\"" Oct 11 05:51:55 crc kubenswrapper[4651]: I1011 05:51:55.555359 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23f28431-7e50-45ef-b823-61cbbb58dddf-kube-api-access-79kqz" (OuterVolumeSpecName: "kube-api-access-79kqz") pod "23f28431-7e50-45ef-b823-61cbbb58dddf" (UID: "23f28431-7e50-45ef-b823-61cbbb58dddf"). InnerVolumeSpecName "kube-api-access-79kqz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:51:55 crc kubenswrapper[4651]: I1011 05:51:55.649370 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-79kqz\" (UniqueName: \"kubernetes.io/projected/23f28431-7e50-45ef-b823-61cbbb58dddf-kube-api-access-79kqz\") on node \"crc\" DevicePath \"\"" Oct 11 05:51:55 crc kubenswrapper[4651]: I1011 05:51:55.890183 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23f28431-7e50-45ef-b823-61cbbb58dddf" path="/var/lib/kubelet/pods/23f28431-7e50-45ef-b823-61cbbb58dddf/volumes" Oct 11 05:51:56 crc kubenswrapper[4651]: I1011 05:51:56.292406 4651 scope.go:117] "RemoveContainer" containerID="32618ae0da78d791b58ea9189012db2b641154486fe1f05edb5c5dd1912a4db2" Oct 11 05:51:56 crc kubenswrapper[4651]: I1011 05:51:56.292416 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-sxmt5/crc-debug-vbrvf" Oct 11 05:51:59 crc kubenswrapper[4651]: I1011 05:51:59.877865 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-694f8cb944-jlqsz_5c85be3f-c6fd-4d66-95ba-87b1502b5548/barbican-api/0.log" Oct 11 05:51:59 crc kubenswrapper[4651]: I1011 05:51:59.879672 4651 scope.go:117] "RemoveContainer" containerID="c912b3bc11354e42d7455ad2000403f0825b71426687440ea00315847957d4d3" Oct 11 05:51:59 crc kubenswrapper[4651]: E1011 05:51:59.879987 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:52:00 crc kubenswrapper[4651]: I1011 05:52:00.118174 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-694f8cb944-jlqsz_5c85be3f-c6fd-4d66-95ba-87b1502b5548/barbican-api-log/0.log" Oct 11 05:52:00 crc kubenswrapper[4651]: I1011 05:52:00.151579 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-65475785f8-ljsqt_3d5d4c12-9a64-4b51-9613-7d8905d3367f/barbican-keystone-listener-log/0.log" Oct 11 05:52:00 crc kubenswrapper[4651]: I1011 05:52:00.178190 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-65475785f8-ljsqt_3d5d4c12-9a64-4b51-9613-7d8905d3367f/barbican-keystone-listener/0.log" Oct 11 05:52:00 crc kubenswrapper[4651]: I1011 05:52:00.328303 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6b84468647-bq8d6_0a7fc0ac-3c48-4cd1-9cbd-78eca125768d/barbican-worker-log/0.log" Oct 11 05:52:00 
crc kubenswrapper[4651]: I1011 05:52:00.332420 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6b84468647-bq8d6_0a7fc0ac-3c48-4cd1-9cbd-78eca125768d/barbican-worker/0.log" Oct 11 05:52:00 crc kubenswrapper[4651]: I1011 05:52:00.579985 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs_073ca1d1-d406-4d47-bfdd-1d1ccc6a0444/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Oct 11 05:52:00 crc kubenswrapper[4651]: I1011 05:52:00.586627 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_123a74ae-c8a8-467a-b358-b13ac2cff461/ceilometer-central-agent/0.log" Oct 11 05:52:00 crc kubenswrapper[4651]: I1011 05:52:00.697613 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_123a74ae-c8a8-467a-b358-b13ac2cff461/ceilometer-notification-agent/0.log" Oct 11 05:52:00 crc kubenswrapper[4651]: I1011 05:52:00.796884 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_123a74ae-c8a8-467a-b358-b13ac2cff461/sg-core/0.log" Oct 11 05:52:00 crc kubenswrapper[4651]: I1011 05:52:00.811765 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_123a74ae-c8a8-467a-b358-b13ac2cff461/proxy-httpd/0.log" Oct 11 05:52:01 crc kubenswrapper[4651]: I1011 05:52:01.006240 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1/cinder-api/0.log" Oct 11 05:52:01 crc kubenswrapper[4651]: I1011 05:52:01.047881 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1/cinder-api-log/0.log" Oct 11 05:52:01 crc kubenswrapper[4651]: I1011 05:52:01.111045 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_9bee9214-1b63-4ef6-81ca-507ef630559b/cinder-scheduler/0.log" Oct 11 05:52:01 crc kubenswrapper[4651]: I1011 05:52:01.227285 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_9bee9214-1b63-4ef6-81ca-507ef630559b/probe/0.log" Oct 11 05:52:01 crc kubenswrapper[4651]: I1011 05:52:01.251754 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-jq2ff_6634acd3-8550-4286-ad94-004cfe4c7def/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 11 05:52:01 crc kubenswrapper[4651]: I1011 05:52:01.461353 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-5wgh8_bd4b257e-2d94-4f78-9ff5-cef288fd0858/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 11 05:52:01 crc kubenswrapper[4651]: I1011 05:52:01.491205 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-zjfbr_18c28764-b000-46b3-af99-9410c165ff04/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 11 05:52:01 crc kubenswrapper[4651]: I1011 05:52:01.801051 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-8c6f6df99-jzqc8_6db2b942-f99f-417d-aff6-a37800db6a41/init/0.log" Oct 11 05:52:02 crc kubenswrapper[4651]: I1011 05:52:02.024196 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-8c6f6df99-jzqc8_6db2b942-f99f-417d-aff6-a37800db6a41/init/0.log" Oct 11 05:52:02 crc kubenswrapper[4651]: I1011 05:52:02.038600 4651 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-8c6f6df99-jzqc8_6db2b942-f99f-417d-aff6-a37800db6a41/dnsmasq-dns/0.log" Oct 11 05:52:02 crc kubenswrapper[4651]: I1011 05:52:02.092493 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-xqtc2_0428933e-bd0d-4be4-94a6-25caf11d1f23/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Oct 11 05:52:02 crc kubenswrapper[4651]: I1011 05:52:02.292104 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f/glance-httpd/0.log" Oct 11 05:52:02 crc kubenswrapper[4651]: I1011 05:52:02.319451 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f/glance-log/0.log" Oct 11 05:52:02 crc kubenswrapper[4651]: I1011 05:52:02.496688 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_3a991cdf-ad8d-4392-bb4e-792e607d740c/glance-httpd/0.log" Oct 11 05:52:02 crc kubenswrapper[4651]: I1011 05:52:02.512182 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_3a991cdf-ad8d-4392-bb4e-792e607d740c/glance-log/0.log" Oct 11 05:52:02 crc kubenswrapper[4651]: I1011 05:52:02.677156 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-5f7d84485b-zb5s7_a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7/horizon/0.log" Oct 11 05:52:02 crc kubenswrapper[4651]: I1011 05:52:02.885512 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r_751c31c3-37b5-4b70-89ba-3c15aee1b7c3/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Oct 11 05:52:03 crc kubenswrapper[4651]: I1011 05:52:03.006841 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-5f7d84485b-zb5s7_a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7/horizon-log/0.log" Oct 11 05:52:03 crc kubenswrapper[4651]: I1011 05:52:03.030639 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-bbbdw_b6609904-4dac-496c-b95b-583873422810/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 11 05:52:03 crc kubenswrapper[4651]: I1011 05:52:03.288454 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-5dbc5c6b84-lhwcw_89ac3499-9018-4545-9e5f-f6eda0d14302/keystone-api/0.log" Oct 11 05:52:03 crc kubenswrapper[4651]: I1011 05:52:03.331618 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_4fbfad70-f21a-4362-9b53-c955b9cca958/kube-state-metrics/0.log" Oct 11 05:52:03 crc kubenswrapper[4651]: I1011 05:52:03.414303 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk_6d343a98-7fde-4f8c-995f-39a826aa5f12/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Oct 11 05:52:03 crc kubenswrapper[4651]: I1011 05:52:03.687976 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6856c774b5-fq9r6_301b63d9-53a7-49b2-9d71-2b2bf854de89/neutron-httpd/0.log" Oct 11 05:52:03 crc kubenswrapper[4651]: I1011 05:52:03.723906 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6856c774b5-fq9r6_301b63d9-53a7-49b2-9d71-2b2bf854de89/neutron-api/0.log" Oct 11 05:52:03 crc kubenswrapper[4651]: I1011 05:52:03.820236 4651 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4_6e7f2e9b-b154-4d49-beea-654732761981/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Oct 11 05:52:04 crc kubenswrapper[4651]: I1011 05:52:04.321068 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_7a57592b-84ea-4d7c-ae5d-fcb7c009cbb0/nova-cell0-conductor-conductor/0.log" Oct 11 05:52:04 crc kubenswrapper[4651]: I1011 05:52:04.333153 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76/nova-api-log/0.log" Oct 11 05:52:04 crc kubenswrapper[4651]: I1011 05:52:04.546363 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76/nova-api-api/0.log" Oct 11 05:52:04 crc kubenswrapper[4651]: I1011 05:52:04.596122 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_d986aa9a-c031-4a99-b2d8-6c09be2fc264/nova-cell1-novncproxy-novncproxy/0.log" Oct 11 05:52:04 crc kubenswrapper[4651]: I1011 05:52:04.648837 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_d2b4f087-b74d-4683-815d-35b6f7736f04/nova-cell1-conductor-conductor/0.log" Oct 11 05:52:05 crc kubenswrapper[4651]: I1011 05:52:05.019476 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-t4rtg_c2b4841c-2ea3-464b-8147-a24437d0d079/nova-edpm-deployment-openstack-edpm-ipam/0.log" Oct 11 05:52:05 crc kubenswrapper[4651]: I1011 05:52:05.151241 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_7d5fc748-aa4e-45c9-a268-9fdc8b2ae358/nova-metadata-log/0.log" Oct 11 05:52:05 crc kubenswrapper[4651]: I1011 05:52:05.437948 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_c7fc16c5-4cac-4da2-82d1-226d056fe645/mysql-bootstrap/0.log" Oct 11 05:52:05 crc kubenswrapper[4651]: I1011 05:52:05.450110 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_14edb1d7-f645-4c9e-8363-0342719b2457/nova-scheduler-scheduler/0.log" Oct 11 05:52:05 crc kubenswrapper[4651]: I1011 05:52:05.558963 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_c7fc16c5-4cac-4da2-82d1-226d056fe645/mysql-bootstrap/0.log" Oct 11 05:52:05 crc kubenswrapper[4651]: I1011 05:52:05.710421 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_c7fc16c5-4cac-4da2-82d1-226d056fe645/galera/0.log" Oct 11 05:52:05 crc kubenswrapper[4651]: I1011 05:52:05.748133 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_6e4d1f39-f0c4-4a19-a525-d0119d4b77e5/mysql-bootstrap/0.log" Oct 11 05:52:05 crc kubenswrapper[4651]: I1011 05:52:05.980553 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_6e4d1f39-f0c4-4a19-a525-d0119d4b77e5/mysql-bootstrap/0.log" Oct 11 05:52:05 crc kubenswrapper[4651]: I1011 05:52:05.996409 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_6e4d1f39-f0c4-4a19-a525-d0119d4b77e5/galera/0.log" Oct 11 05:52:06 crc kubenswrapper[4651]: I1011 05:52:06.131593 4651 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_openstackclient_47aacf69-f6eb-4e85-9b70-8f241bfa812f/openstackclient/0.log" Oct 11 05:52:06 crc kubenswrapper[4651]: I1011 05:52:06.194142 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-4gdx9_058302c4-d304-4a99-afbc-84a558968cfe/ovn-controller/0.log" Oct 11 05:52:06 crc kubenswrapper[4651]: I1011 05:52:06.337240 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_7d5fc748-aa4e-45c9-a268-9fdc8b2ae358/nova-metadata-metadata/0.log" Oct 11 05:52:06 crc kubenswrapper[4651]: I1011 05:52:06.426194 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-6c9fd_6e416ba6-0c00-41f4-857d-3c53c9179e6b/openstack-network-exporter/0.log" Oct 11 05:52:06 crc kubenswrapper[4651]: I1011 05:52:06.582343 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-2rjqb_15e07425-1bc8-43c4-ab2a-9daf9e9f95bb/ovsdb-server-init/0.log" Oct 11 05:52:06 crc kubenswrapper[4651]: I1011 05:52:06.775855 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-2rjqb_15e07425-1bc8-43c4-ab2a-9daf9e9f95bb/ovsdb-server-init/0.log" Oct 11 05:52:06 crc kubenswrapper[4651]: I1011 05:52:06.790625 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-2rjqb_15e07425-1bc8-43c4-ab2a-9daf9e9f95bb/ovs-vswitchd/0.log" Oct 11 05:52:06 crc kubenswrapper[4651]: I1011 05:52:06.796989 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-2rjqb_15e07425-1bc8-43c4-ab2a-9daf9e9f95bb/ovsdb-server/0.log" Oct 11 05:52:07 crc kubenswrapper[4651]: I1011 05:52:07.019382 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-cfgdk_b0507706-1820-417c-824e-e8420fda7baa/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Oct 11 05:52:07 crc kubenswrapper[4651]: I1011 05:52:07.100872 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_a7f3b01c-ab92-45f7-9e89-a76a93a8db6a/ovn-northd/0.log" Oct 11 05:52:07 crc kubenswrapper[4651]: I1011 05:52:07.114648 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_a7f3b01c-ab92-45f7-9e89-a76a93a8db6a/openstack-network-exporter/0.log" Oct 11 05:52:07 crc kubenswrapper[4651]: I1011 05:52:07.336239 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_66e1ea71-4579-48c7-b0c9-8074d1a6f821/ovsdbserver-nb/0.log" Oct 11 05:52:07 crc kubenswrapper[4651]: I1011 05:52:07.349466 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_66e1ea71-4579-48c7-b0c9-8074d1a6f821/openstack-network-exporter/0.log" Oct 11 05:52:07 crc kubenswrapper[4651]: I1011 05:52:07.491440 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_0ddbe514-235a-4191-9f6b-3d785b0b4d21/openstack-network-exporter/0.log" Oct 11 05:52:07 crc kubenswrapper[4651]: I1011 05:52:07.545924 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_0ddbe514-235a-4191-9f6b-3d785b0b4d21/ovsdbserver-sb/0.log" Oct 11 05:52:07 crc kubenswrapper[4651]: I1011 05:52:07.684647 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-74cbfd888-nqwlq_c777922e-553b-44ec-84c1-4b3f6644701b/placement-api/0.log" Oct 11 05:52:07 crc kubenswrapper[4651]: I1011 05:52:07.800865 4651 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openstack_placement-74cbfd888-nqwlq_c777922e-553b-44ec-84c1-4b3f6644701b/placement-log/0.log" Oct 11 05:52:07 crc kubenswrapper[4651]: I1011 05:52:07.843317 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_67140304-66cc-425b-a21c-b09bb0c83b8a/setup-container/0.log" Oct 11 05:52:08 crc kubenswrapper[4651]: I1011 05:52:08.145462 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_67140304-66cc-425b-a21c-b09bb0c83b8a/setup-container/0.log" Oct 11 05:52:08 crc kubenswrapper[4651]: I1011 05:52:08.153618 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_67140304-66cc-425b-a21c-b09bb0c83b8a/rabbitmq/0.log" Oct 11 05:52:08 crc kubenswrapper[4651]: I1011 05:52:08.242437 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_f157ed46-75e8-4f03-b4ec-1234385015bd/setup-container/0.log" Oct 11 05:52:08 crc kubenswrapper[4651]: I1011 05:52:08.644847 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_f157ed46-75e8-4f03-b4ec-1234385015bd/rabbitmq/0.log" Oct 11 05:52:08 crc kubenswrapper[4651]: I1011 05:52:08.648496 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_f157ed46-75e8-4f03-b4ec-1234385015bd/setup-container/0.log" Oct 11 05:52:08 crc kubenswrapper[4651]: I1011 05:52:08.662229 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-dsw67_c5dbb723-ec78-4c80-a7fe-10d7499493c7/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 11 05:52:08 crc kubenswrapper[4651]: I1011 05:52:08.898028 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-kwqbl_f8169fed-dda6-4c75-8a3c-4ecd3b7e1866/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Oct 11 05:52:08 crc kubenswrapper[4651]: I1011 05:52:08.955292 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m_fdd938c5-0eb6-402b-9ee3-28bd04fbd55e/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Oct 11 05:52:09 crc kubenswrapper[4651]: I1011 05:52:09.150543 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-6g9mw_cbfb3a24-45a0-4455-984c-134812231d47/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 11 05:52:09 crc kubenswrapper[4651]: I1011 05:52:09.152327 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-7vjzq_0dfc1301-868d-4226-917c-475041f220f5/ssh-known-hosts-edpm-deployment/0.log" Oct 11 05:52:09 crc kubenswrapper[4651]: I1011 05:52:09.436464 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-695465f8bc-lbxbx_e8259a36-62ec-4cdc-b377-3574bf0bead5/proxy-server/0.log" Oct 11 05:52:09 crc kubenswrapper[4651]: I1011 05:52:09.465677 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-695465f8bc-lbxbx_e8259a36-62ec-4cdc-b377-3574bf0bead5/proxy-httpd/0.log" Oct 11 05:52:09 crc kubenswrapper[4651]: I1011 05:52:09.632766 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-l2gmr_35bb96b9-93d2-4ab5-a102-11f093a29144/swift-ring-rebalance/0.log" Oct 11 05:52:09 crc kubenswrapper[4651]: I1011 05:52:09.697054 4651 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_swift-storage-0_aef9d930-4287-490f-85c7-5a791f985a77/account-auditor/0.log" Oct 11 05:52:09 crc kubenswrapper[4651]: I1011 05:52:09.767061 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_aef9d930-4287-490f-85c7-5a791f985a77/account-reaper/0.log" Oct 11 05:52:09 crc kubenswrapper[4651]: I1011 05:52:09.874404 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_aef9d930-4287-490f-85c7-5a791f985a77/account-replicator/0.log" Oct 11 05:52:09 crc kubenswrapper[4651]: I1011 05:52:09.900970 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_aef9d930-4287-490f-85c7-5a791f985a77/account-server/0.log" Oct 11 05:52:09 crc kubenswrapper[4651]: I1011 05:52:09.992968 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_aef9d930-4287-490f-85c7-5a791f985a77/container-auditor/0.log" Oct 11 05:52:10 crc kubenswrapper[4651]: I1011 05:52:10.032041 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_aef9d930-4287-490f-85c7-5a791f985a77/container-replicator/0.log" Oct 11 05:52:10 crc kubenswrapper[4651]: I1011 05:52:10.110261 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_aef9d930-4287-490f-85c7-5a791f985a77/container-server/0.log" Oct 11 05:52:10 crc kubenswrapper[4651]: I1011 05:52:10.138078 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_aef9d930-4287-490f-85c7-5a791f985a77/container-updater/0.log" Oct 11 05:52:10 crc kubenswrapper[4651]: I1011 05:52:10.190941 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_aef9d930-4287-490f-85c7-5a791f985a77/object-auditor/0.log" Oct 11 05:52:10 crc kubenswrapper[4651]: I1011 05:52:10.260569 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_aef9d930-4287-490f-85c7-5a791f985a77/object-expirer/0.log" Oct 11 05:52:10 crc kubenswrapper[4651]: I1011 05:52:10.324850 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_aef9d930-4287-490f-85c7-5a791f985a77/object-replicator/0.log" Oct 11 05:52:10 crc kubenswrapper[4651]: I1011 05:52:10.391229 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_aef9d930-4287-490f-85c7-5a791f985a77/object-server/0.log" Oct 11 05:52:10 crc kubenswrapper[4651]: I1011 05:52:10.488174 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_aef9d930-4287-490f-85c7-5a791f985a77/rsync/0.log" Oct 11 05:52:10 crc kubenswrapper[4651]: I1011 05:52:10.555214 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_aef9d930-4287-490f-85c7-5a791f985a77/object-updater/0.log" Oct 11 05:52:10 crc kubenswrapper[4651]: I1011 05:52:10.571684 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_aef9d930-4287-490f-85c7-5a791f985a77/swift-recon-cron/0.log" Oct 11 05:52:10 crc kubenswrapper[4651]: I1011 05:52:10.769241 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh_352a8263-3fc8-49fc-bc0b-6b5671d02fde/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Oct 11 05:52:10 crc kubenswrapper[4651]: I1011 05:52:10.870226 4651 scope.go:117] "RemoveContainer" containerID="c912b3bc11354e42d7455ad2000403f0825b71426687440ea00315847957d4d3" Oct 
11 05:52:10 crc kubenswrapper[4651]: E1011 05:52:10.870588 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:52:10 crc kubenswrapper[4651]: I1011 05:52:10.905389 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_76b8d472-5f4e-4d97-be15-0f5be51acd85/tempest-tests-tempest-tests-runner/0.log" Oct 11 05:52:11 crc kubenswrapper[4651]: I1011 05:52:11.015792 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_87851c60-a63d-43bd-a248-6dad36680eed/test-operator-logs-container/0.log" Oct 11 05:52:11 crc kubenswrapper[4651]: I1011 05:52:11.089153 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-x9lqn_1187c352-70c0-4b8f-a7fa-300e4093c60c/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 11 05:52:20 crc kubenswrapper[4651]: I1011 05:52:20.650602 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_935c9395-17f6-4c8f-a08a-e3af25a75a9a/memcached/0.log" Oct 11 05:52:23 crc kubenswrapper[4651]: I1011 05:52:23.871618 4651 scope.go:117] "RemoveContainer" containerID="c912b3bc11354e42d7455ad2000403f0825b71426687440ea00315847957d4d3" Oct 11 05:52:23 crc kubenswrapper[4651]: E1011 05:52:23.873550 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:52:37 crc kubenswrapper[4651]: I1011 05:52:37.578597 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4_7f8c3a2b-8cc3-4266-bdb2-e896769c8da4/util/0.log" Oct 11 05:52:37 crc kubenswrapper[4651]: I1011 05:52:37.828806 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4_7f8c3a2b-8cc3-4266-bdb2-e896769c8da4/util/0.log" Oct 11 05:52:37 crc kubenswrapper[4651]: I1011 05:52:37.830000 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4_7f8c3a2b-8cc3-4266-bdb2-e896769c8da4/pull/0.log" Oct 11 05:52:37 crc kubenswrapper[4651]: I1011 05:52:37.832380 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4_7f8c3a2b-8cc3-4266-bdb2-e896769c8da4/pull/0.log" Oct 11 05:52:37 crc kubenswrapper[4651]: I1011 05:52:37.870513 4651 scope.go:117] "RemoveContainer" containerID="c912b3bc11354e42d7455ad2000403f0825b71426687440ea00315847957d4d3" Oct 11 05:52:37 crc kubenswrapper[4651]: E1011 05:52:37.870894 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:52:38 crc kubenswrapper[4651]: I1011 05:52:38.004857 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4_7f8c3a2b-8cc3-4266-bdb2-e896769c8da4/util/0.log" Oct 11 05:52:38 crc kubenswrapper[4651]: I1011 05:52:38.011660 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4_7f8c3a2b-8cc3-4266-bdb2-e896769c8da4/extract/0.log" Oct 11 05:52:38 crc kubenswrapper[4651]: I1011 05:52:38.067554 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4_7f8c3a2b-8cc3-4266-bdb2-e896769c8da4/pull/0.log" Oct 11 05:52:38 crc kubenswrapper[4651]: I1011 05:52:38.203273 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-64f84fcdbb-gbn6q_cca7099f-c5ef-4109-91f5-b6831d0771e8/kube-rbac-proxy/0.log" Oct 11 05:52:38 crc kubenswrapper[4651]: I1011 05:52:38.255623 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-64f84fcdbb-gbn6q_cca7099f-c5ef-4109-91f5-b6831d0771e8/manager/0.log" Oct 11 05:52:38 crc kubenswrapper[4651]: I1011 05:52:38.292789 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-59cdc64769-xtlln_b5b061bd-7d85-4960-a956-95c7911591a2/kube-rbac-proxy/0.log" Oct 11 05:52:38 crc kubenswrapper[4651]: I1011 05:52:38.447640 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-59cdc64769-xtlln_b5b061bd-7d85-4960-a956-95c7911591a2/manager/0.log" Oct 11 05:52:38 crc kubenswrapper[4651]: I1011 05:52:38.494635 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-687df44cdb-g8m5f_75692ce0-1ecb-4db6-a831-5740382b17e2/kube-rbac-proxy/0.log" Oct 11 05:52:38 crc kubenswrapper[4651]: I1011 05:52:38.530139 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-687df44cdb-g8m5f_75692ce0-1ecb-4db6-a831-5740382b17e2/manager/0.log" Oct 11 05:52:38 crc kubenswrapper[4651]: I1011 05:52:38.652044 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-7bb46cd7d-t88s8_99b28924-6f7a-4232-8fd5-b245178ce2ea/kube-rbac-proxy/0.log" Oct 11 05:52:38 crc kubenswrapper[4651]: I1011 05:52:38.767953 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-7bb46cd7d-t88s8_99b28924-6f7a-4232-8fd5-b245178ce2ea/manager/0.log" Oct 11 05:52:38 crc kubenswrapper[4651]: I1011 05:52:38.840318 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-6d9967f8dd-prncj_d725cd21-efdc-4182-be84-460db3042d11/kube-rbac-proxy/0.log" Oct 11 05:52:38 crc kubenswrapper[4651]: I1011 05:52:38.862345 4651 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_heat-operator-controller-manager-6d9967f8dd-prncj_d725cd21-efdc-4182-be84-460db3042d11/manager/0.log" Oct 11 05:52:38 crc kubenswrapper[4651]: I1011 05:52:38.951901 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-6d74794d9b-c9bzn_dd5122d9-c098-49cf-9723-bc0c31c6ce3b/kube-rbac-proxy/0.log" Oct 11 05:52:39 crc kubenswrapper[4651]: I1011 05:52:39.049189 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-6d74794d9b-c9bzn_dd5122d9-c098-49cf-9723-bc0c31c6ce3b/manager/0.log" Oct 11 05:52:39 crc kubenswrapper[4651]: I1011 05:52:39.170672 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-8678f847b6-vpnkk_50a25e99-d2ec-4b16-a5fa-894e79ee528e/kube-rbac-proxy/0.log" Oct 11 05:52:39 crc kubenswrapper[4651]: I1011 05:52:39.293504 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-8678f847b6-vpnkk_50a25e99-d2ec-4b16-a5fa-894e79ee528e/manager/0.log" Oct 11 05:52:39 crc kubenswrapper[4651]: I1011 05:52:39.335600 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-74cb5cbc49-fwr7j_205c5753-c94e-4bf4-993f-36b798bb489d/kube-rbac-proxy/0.log" Oct 11 05:52:39 crc kubenswrapper[4651]: I1011 05:52:39.367758 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-74cb5cbc49-fwr7j_205c5753-c94e-4bf4-993f-36b798bb489d/manager/0.log" Oct 11 05:52:39 crc kubenswrapper[4651]: I1011 05:52:39.475962 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-ddb98f99b-989mj_b6b16d99-7f05-464a-a338-dcded4fa42fa/kube-rbac-proxy/0.log" Oct 11 05:52:39 crc kubenswrapper[4651]: I1011 05:52:39.583385 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-ddb98f99b-989mj_b6b16d99-7f05-464a-a338-dcded4fa42fa/manager/0.log" Oct 11 05:52:39 crc kubenswrapper[4651]: I1011 05:52:39.689125 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-59578bc799-9d9sm_5901a38b-902a-4822-8483-9d478e61aa40/kube-rbac-proxy/0.log" Oct 11 05:52:39 crc kubenswrapper[4651]: I1011 05:52:39.691972 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-59578bc799-9d9sm_5901a38b-902a-4822-8483-9d478e61aa40/manager/0.log" Oct 11 05:52:39 crc kubenswrapper[4651]: I1011 05:52:39.809992 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-5777b4f897-zdcb9_519c3c0c-07ee-4f48-ba92-d202190d9a49/kube-rbac-proxy/0.log" Oct 11 05:52:39 crc kubenswrapper[4651]: I1011 05:52:39.870385 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-5777b4f897-zdcb9_519c3c0c-07ee-4f48-ba92-d202190d9a49/manager/0.log" Oct 11 05:52:39 crc kubenswrapper[4651]: I1011 05:52:39.940778 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-797d478b46-xgxfd_fcc9418a-3e9d-4c74-849d-b9884077820c/kube-rbac-proxy/0.log" Oct 11 05:52:40 crc kubenswrapper[4651]: I1011 05:52:40.075345 4651 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-797d478b46-xgxfd_fcc9418a-3e9d-4c74-849d-b9884077820c/manager/0.log" Oct 11 05:52:40 crc kubenswrapper[4651]: I1011 05:52:40.116228 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-57bb74c7bf-7mhch_321665c4-bc9a-47e0-a6c4-a54d56ad5ce8/kube-rbac-proxy/0.log" Oct 11 05:52:40 crc kubenswrapper[4651]: I1011 05:52:40.247786 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-57bb74c7bf-7mhch_321665c4-bc9a-47e0-a6c4-a54d56ad5ce8/manager/0.log" Oct 11 05:52:40 crc kubenswrapper[4651]: I1011 05:52:40.317326 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-6d7c7ddf95-27r6g_4aede6f1-d9d0-4c62-b118-7c93fa2af789/manager/0.log" Oct 11 05:52:40 crc kubenswrapper[4651]: I1011 05:52:40.375185 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-6d7c7ddf95-27r6g_4aede6f1-d9d0-4c62-b118-7c93fa2af789/kube-rbac-proxy/0.log" Oct 11 05:52:40 crc kubenswrapper[4651]: I1011 05:52:40.514959 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6cc7fb757dxbxc6_9105b503-cbba-48d1-acdb-ac21b7c791b4/kube-rbac-proxy/0.log" Oct 11 05:52:40 crc kubenswrapper[4651]: I1011 05:52:40.534350 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6cc7fb757dxbxc6_9105b503-cbba-48d1-acdb-ac21b7c791b4/manager/0.log" Oct 11 05:52:40 crc kubenswrapper[4651]: I1011 05:52:40.658244 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-776b77588d-7z9rb_7bcc02c2-c9c4-498e-8f95-ace0d1b98899/kube-rbac-proxy/0.log" Oct 11 05:52:40 crc kubenswrapper[4651]: I1011 05:52:40.826427 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-69c9cf8694-vccpk_25b74915-17cd-4558-9801-5a0d5113b578/kube-rbac-proxy/0.log" Oct 11 05:52:41 crc kubenswrapper[4651]: I1011 05:52:41.086997 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-69c9cf8694-vccpk_25b74915-17cd-4558-9801-5a0d5113b578/operator/0.log" Oct 11 05:52:41 crc kubenswrapper[4651]: I1011 05:52:41.099475 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-q6f6m_69aec95d-9651-4e5f-9cb6-a6ca9d5093f5/registry-server/0.log" Oct 11 05:52:41 crc kubenswrapper[4651]: I1011 05:52:41.259386 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-869cc7797f-6m2qz_c9dabf8d-2991-4af0-99c8-084e157e9b52/kube-rbac-proxy/0.log" Oct 11 05:52:41 crc kubenswrapper[4651]: I1011 05:52:41.379723 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-869cc7797f-6m2qz_c9dabf8d-2991-4af0-99c8-084e157e9b52/manager/0.log" Oct 11 05:52:41 crc kubenswrapper[4651]: I1011 05:52:41.458171 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-664664cb68-5s2zz_160294e4-b990-41b3-8f6c-22102366d72c/kube-rbac-proxy/0.log" Oct 11 05:52:41 crc kubenswrapper[4651]: I1011 
05:52:41.534358 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-664664cb68-5s2zz_160294e4-b990-41b3-8f6c-22102366d72c/manager/0.log" Oct 11 05:52:41 crc kubenswrapper[4651]: I1011 05:52:41.680624 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-dfqdz_bd36db45-dfcf-4d27-8bfb-fcefeff7f0ba/operator/0.log" Oct 11 05:52:41 crc kubenswrapper[4651]: I1011 05:52:41.793609 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f4d5dfdc6-vmqd8_f4175bba-9aae-4faf-8670-f612f867827e/kube-rbac-proxy/0.log" Oct 11 05:52:41 crc kubenswrapper[4651]: I1011 05:52:41.916407 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f4d5dfdc6-vmqd8_f4175bba-9aae-4faf-8670-f612f867827e/manager/0.log" Oct 11 05:52:41 crc kubenswrapper[4651]: I1011 05:52:41.965759 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-776b77588d-7z9rb_7bcc02c2-c9c4-498e-8f95-ace0d1b98899/manager/0.log" Oct 11 05:52:41 crc kubenswrapper[4651]: I1011 05:52:41.968798 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-578874c84d-x9x8w_555de60b-f68b-42a6-a662-d1e5202a30c5/kube-rbac-proxy/0.log" Oct 11 05:52:42 crc kubenswrapper[4651]: I1011 05:52:42.029878 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-578874c84d-x9x8w_555de60b-f68b-42a6-a662-d1e5202a30c5/manager/0.log" Oct 11 05:52:42 crc kubenswrapper[4651]: I1011 05:52:42.133669 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-ffcdd6c94-p9d4d_0e634116-91fc-4fad-b906-a998e77ea3e4/kube-rbac-proxy/0.log" Oct 11 05:52:42 crc kubenswrapper[4651]: I1011 05:52:42.229815 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-ffcdd6c94-p9d4d_0e634116-91fc-4fad-b906-a998e77ea3e4/manager/0.log" Oct 11 05:52:42 crc kubenswrapper[4651]: I1011 05:52:42.250016 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-646675d848-7w8pk_9bfc9d92-8b6d-4b13-9759-b7185e1f16bb/kube-rbac-proxy/0.log" Oct 11 05:52:42 crc kubenswrapper[4651]: I1011 05:52:42.398284 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-646675d848-7w8pk_9bfc9d92-8b6d-4b13-9759-b7185e1f16bb/manager/0.log" Oct 11 05:52:49 crc kubenswrapper[4651]: I1011 05:52:49.879066 4651 scope.go:117] "RemoveContainer" containerID="c912b3bc11354e42d7455ad2000403f0825b71426687440ea00315847957d4d3" Oct 11 05:52:49 crc kubenswrapper[4651]: E1011 05:52:49.879979 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:52:59 crc kubenswrapper[4651]: I1011 05:52:59.231676 4651 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-27wgh_f8fd1293-3d68-4dd2-bc12-8f7c02017bcd/control-plane-machine-set-operator/0.log" Oct 11 05:52:59 crc kubenswrapper[4651]: I1011 05:52:59.384313 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-2mj56_b590e4d5-1684-4e2f-b5e9-8fbf00db4546/kube-rbac-proxy/0.log" Oct 11 05:52:59 crc kubenswrapper[4651]: I1011 05:52:59.386766 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-2mj56_b590e4d5-1684-4e2f-b5e9-8fbf00db4546/machine-api-operator/0.log" Oct 11 05:53:01 crc kubenswrapper[4651]: I1011 05:53:01.869878 4651 scope.go:117] "RemoveContainer" containerID="c912b3bc11354e42d7455ad2000403f0825b71426687440ea00315847957d4d3" Oct 11 05:53:01 crc kubenswrapper[4651]: E1011 05:53:01.870663 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:53:12 crc kubenswrapper[4651]: I1011 05:53:12.044301 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-pmm46_d483b46d-edb9-4b36-b0a4-3c959e0f6aca/cert-manager-controller/0.log" Oct 11 05:53:12 crc kubenswrapper[4651]: I1011 05:53:12.251429 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-5t4wt_4045e2c1-af15-42ee-bfee-f72d32924237/cert-manager-cainjector/0.log" Oct 11 05:53:12 crc kubenswrapper[4651]: I1011 05:53:12.282199 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-l859c_54d95539-bc93-45d6-a26a-95284f123cde/cert-manager-webhook/0.log" Oct 11 05:53:16 crc kubenswrapper[4651]: I1011 05:53:16.869104 4651 scope.go:117] "RemoveContainer" containerID="c912b3bc11354e42d7455ad2000403f0825b71426687440ea00315847957d4d3" Oct 11 05:53:16 crc kubenswrapper[4651]: E1011 05:53:16.869684 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:53:25 crc kubenswrapper[4651]: I1011 05:53:25.002538 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6b874cbd85-zwhxz_03a76fa3-7b5c-4b1c-9c67-d54f0e448c2d/nmstate-console-plugin/0.log" Oct 11 05:53:25 crc kubenswrapper[4651]: I1011 05:53:25.189228 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-qpgc7_50f6467a-ad8d-4828-81ba-b944dccc4be7/nmstate-handler/0.log" Oct 11 05:53:25 crc kubenswrapper[4651]: I1011 05:53:25.199533 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-gg29l_298ed4c9-1190-4617-a3c7-147f15e1fea3/kube-rbac-proxy/0.log" Oct 11 05:53:25 crc kubenswrapper[4651]: I1011 05:53:25.222720 4651 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-gg29l_298ed4c9-1190-4617-a3c7-147f15e1fea3/nmstate-metrics/0.log" Oct 11 05:53:25 crc kubenswrapper[4651]: I1011 05:53:25.366522 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-858ddd8f98-p95g9_c7c3404e-ed5a-48d9-b525-7451514c9a5c/nmstate-operator/0.log" Oct 11 05:53:25 crc kubenswrapper[4651]: I1011 05:53:25.399793 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6cdbc54649-dlvdg_85293b68-d608-466d-9aa5-2b5eae8edc74/nmstate-webhook/0.log" Oct 11 05:53:27 crc kubenswrapper[4651]: I1011 05:53:27.872457 4651 scope.go:117] "RemoveContainer" containerID="c912b3bc11354e42d7455ad2000403f0825b71426687440ea00315847957d4d3" Oct 11 05:53:27 crc kubenswrapper[4651]: E1011 05:53:27.873377 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:53:38 crc kubenswrapper[4651]: I1011 05:53:38.869617 4651 scope.go:117] "RemoveContainer" containerID="c912b3bc11354e42d7455ad2000403f0825b71426687440ea00315847957d4d3" Oct 11 05:53:38 crc kubenswrapper[4651]: E1011 05:53:38.871260 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:53:40 crc kubenswrapper[4651]: I1011 05:53:40.670212 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-gbzvh_b23b74fb-01aa-4027-978b-ef5fccb6a023/kube-rbac-proxy/0.log" Oct 11 05:53:40 crc kubenswrapper[4651]: I1011 05:53:40.795284 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-gbzvh_b23b74fb-01aa-4027-978b-ef5fccb6a023/controller/0.log" Oct 11 05:53:40 crc kubenswrapper[4651]: I1011 05:53:40.885998 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-64bf5d555-sxrwh_4a2881f0-834f-4e9d-8be5-5adb1f5feefd/frr-k8s-webhook-server/0.log" Oct 11 05:53:40 crc kubenswrapper[4651]: I1011 05:53:40.972646 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/cp-frr-files/0.log" Oct 11 05:53:41 crc kubenswrapper[4651]: I1011 05:53:41.130957 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/cp-frr-files/0.log" Oct 11 05:53:41 crc kubenswrapper[4651]: I1011 05:53:41.134230 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/cp-reloader/0.log" Oct 11 05:53:41 crc kubenswrapper[4651]: I1011 05:53:41.185472 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/cp-reloader/0.log" Oct 11 05:53:41 crc 
kubenswrapper[4651]: I1011 05:53:41.193991 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/cp-metrics/0.log" Oct 11 05:53:41 crc kubenswrapper[4651]: I1011 05:53:41.358323 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/cp-frr-files/0.log" Oct 11 05:53:41 crc kubenswrapper[4651]: I1011 05:53:41.417803 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/cp-reloader/0.log" Oct 11 05:53:41 crc kubenswrapper[4651]: I1011 05:53:41.418686 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/cp-metrics/0.log" Oct 11 05:53:41 crc kubenswrapper[4651]: I1011 05:53:41.428932 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/cp-metrics/0.log" Oct 11 05:53:41 crc kubenswrapper[4651]: I1011 05:53:41.633372 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/cp-frr-files/0.log" Oct 11 05:53:41 crc kubenswrapper[4651]: I1011 05:53:41.652954 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/cp-reloader/0.log" Oct 11 05:53:41 crc kubenswrapper[4651]: I1011 05:53:41.676115 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/controller/0.log" Oct 11 05:53:41 crc kubenswrapper[4651]: I1011 05:53:41.705253 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/cp-metrics/0.log" Oct 11 05:53:41 crc kubenswrapper[4651]: I1011 05:53:41.878902 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/frr-metrics/0.log" Oct 11 05:53:41 crc kubenswrapper[4651]: I1011 05:53:41.934566 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/kube-rbac-proxy/0.log" Oct 11 05:53:41 crc kubenswrapper[4651]: I1011 05:53:41.938094 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/kube-rbac-proxy-frr/0.log" Oct 11 05:53:42 crc kubenswrapper[4651]: I1011 05:53:42.072532 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/reloader/0.log" Oct 11 05:53:42 crc kubenswrapper[4651]: I1011 05:53:42.263606 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-64d4c8dfd9-ssxfc_176a1bb0-149f-47fd-b9ba-d3249b405fa1/manager/0.log" Oct 11 05:53:42 crc kubenswrapper[4651]: I1011 05:53:42.399269 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6ddcb68dc7-mhfj6_3da924ac-510e-4c75-8e5d-2571c454a7a5/webhook-server/0.log" Oct 11 05:53:42 crc kubenswrapper[4651]: I1011 05:53:42.619073 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-j7mlx_a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6/kube-rbac-proxy/0.log" Oct 11 05:53:43 crc kubenswrapper[4651]: I1011 05:53:43.085291 4651 log.go:25] "Finished parsing 
log file" path="/var/log/pods/metallb-system_speaker-j7mlx_a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6/speaker/0.log" Oct 11 05:53:43 crc kubenswrapper[4651]: I1011 05:53:43.252222 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/frr/0.log" Oct 11 05:53:50 crc kubenswrapper[4651]: I1011 05:53:50.870776 4651 scope.go:117] "RemoveContainer" containerID="c912b3bc11354e42d7455ad2000403f0825b71426687440ea00315847957d4d3" Oct 11 05:53:50 crc kubenswrapper[4651]: E1011 05:53:50.871919 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:53:58 crc kubenswrapper[4651]: I1011 05:53:58.776020 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d_080b871c-11bf-4ef1-b785-9058524b6c82/util/0.log" Oct 11 05:53:58 crc kubenswrapper[4651]: I1011 05:53:58.963340 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d_080b871c-11bf-4ef1-b785-9058524b6c82/pull/0.log" Oct 11 05:53:59 crc kubenswrapper[4651]: I1011 05:53:59.024650 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d_080b871c-11bf-4ef1-b785-9058524b6c82/pull/0.log" Oct 11 05:53:59 crc kubenswrapper[4651]: I1011 05:53:59.065995 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d_080b871c-11bf-4ef1-b785-9058524b6c82/util/0.log" Oct 11 05:53:59 crc kubenswrapper[4651]: I1011 05:53:59.171197 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d_080b871c-11bf-4ef1-b785-9058524b6c82/pull/0.log" Oct 11 05:53:59 crc kubenswrapper[4651]: I1011 05:53:59.193492 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d_080b871c-11bf-4ef1-b785-9058524b6c82/util/0.log" Oct 11 05:53:59 crc kubenswrapper[4651]: I1011 05:53:59.281156 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d_080b871c-11bf-4ef1-b785-9058524b6c82/extract/0.log" Oct 11 05:53:59 crc kubenswrapper[4651]: I1011 05:53:59.351052 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-bmwjb_d813ef9d-786d-41a3-8170-90be0cf304bf/extract-utilities/0.log" Oct 11 05:53:59 crc kubenswrapper[4651]: I1011 05:53:59.713609 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-bmwjb_d813ef9d-786d-41a3-8170-90be0cf304bf/extract-utilities/0.log" Oct 11 05:53:59 crc kubenswrapper[4651]: I1011 05:53:59.756921 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-bmwjb_d813ef9d-786d-41a3-8170-90be0cf304bf/extract-content/0.log" Oct 11 05:53:59 crc 
kubenswrapper[4651]: I1011 05:53:59.773779 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-bmwjb_d813ef9d-786d-41a3-8170-90be0cf304bf/extract-content/0.log" Oct 11 05:54:00 crc kubenswrapper[4651]: I1011 05:54:00.014548 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-bmwjb_d813ef9d-786d-41a3-8170-90be0cf304bf/extract-utilities/0.log" Oct 11 05:54:00 crc kubenswrapper[4651]: I1011 05:54:00.019176 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-bmwjb_d813ef9d-786d-41a3-8170-90be0cf304bf/extract-content/0.log" Oct 11 05:54:00 crc kubenswrapper[4651]: I1011 05:54:00.283261 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vs2v6_819c85ef-b451-47ed-88f6-1790f362d446/extract-utilities/0.log" Oct 11 05:54:00 crc kubenswrapper[4651]: I1011 05:54:00.406769 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-bmwjb_d813ef9d-786d-41a3-8170-90be0cf304bf/registry-server/0.log" Oct 11 05:54:00 crc kubenswrapper[4651]: I1011 05:54:00.437971 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vs2v6_819c85ef-b451-47ed-88f6-1790f362d446/extract-content/0.log" Oct 11 05:54:00 crc kubenswrapper[4651]: I1011 05:54:00.453135 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vs2v6_819c85ef-b451-47ed-88f6-1790f362d446/extract-utilities/0.log" Oct 11 05:54:00 crc kubenswrapper[4651]: I1011 05:54:00.503925 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vs2v6_819c85ef-b451-47ed-88f6-1790f362d446/extract-content/0.log" Oct 11 05:54:00 crc kubenswrapper[4651]: I1011 05:54:00.686646 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vs2v6_819c85ef-b451-47ed-88f6-1790f362d446/extract-content/0.log" Oct 11 05:54:00 crc kubenswrapper[4651]: I1011 05:54:00.778779 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vs2v6_819c85ef-b451-47ed-88f6-1790f362d446/extract-utilities/0.log" Oct 11 05:54:00 crc kubenswrapper[4651]: I1011 05:54:00.885266 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm_f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c/util/0.log" Oct 11 05:54:00 crc kubenswrapper[4651]: I1011 05:54:00.944676 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vs2v6_819c85ef-b451-47ed-88f6-1790f362d446/registry-server/0.log" Oct 11 05:54:01 crc kubenswrapper[4651]: I1011 05:54:01.069577 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm_f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c/util/0.log" Oct 11 05:54:01 crc kubenswrapper[4651]: I1011 05:54:01.116599 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm_f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c/pull/0.log" Oct 11 05:54:01 crc kubenswrapper[4651]: I1011 05:54:01.139889 4651 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm_f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c/pull/0.log" Oct 11 05:54:01 crc kubenswrapper[4651]: I1011 05:54:01.301817 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm_f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c/pull/0.log" Oct 11 05:54:01 crc kubenswrapper[4651]: I1011 05:54:01.310598 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm_f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c/util/0.log" Oct 11 05:54:01 crc kubenswrapper[4651]: I1011 05:54:01.318663 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm_f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c/extract/0.log" Oct 11 05:54:01 crc kubenswrapper[4651]: I1011 05:54:01.513513 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-c7pll_112a830a-ce46-4e30-8d29-10f0605944d9/marketplace-operator/0.log" Oct 11 05:54:01 crc kubenswrapper[4651]: I1011 05:54:01.536464 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rvmdl_0591b55e-2399-450c-9738-6160d1d25ee1/extract-utilities/0.log" Oct 11 05:54:01 crc kubenswrapper[4651]: I1011 05:54:01.708527 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rvmdl_0591b55e-2399-450c-9738-6160d1d25ee1/extract-utilities/0.log" Oct 11 05:54:01 crc kubenswrapper[4651]: I1011 05:54:01.718426 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rvmdl_0591b55e-2399-450c-9738-6160d1d25ee1/extract-content/0.log" Oct 11 05:54:01 crc kubenswrapper[4651]: I1011 05:54:01.762424 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rvmdl_0591b55e-2399-450c-9738-6160d1d25ee1/extract-content/0.log" Oct 11 05:54:01 crc kubenswrapper[4651]: I1011 05:54:01.911181 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rvmdl_0591b55e-2399-450c-9738-6160d1d25ee1/extract-utilities/0.log" Oct 11 05:54:01 crc kubenswrapper[4651]: I1011 05:54:01.941023 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rvmdl_0591b55e-2399-450c-9738-6160d1d25ee1/extract-content/0.log" Oct 11 05:54:02 crc kubenswrapper[4651]: I1011 05:54:02.081337 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rvmdl_0591b55e-2399-450c-9738-6160d1d25ee1/registry-server/0.log" Oct 11 05:54:02 crc kubenswrapper[4651]: I1011 05:54:02.141593 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-strx6_60df6e10-9dc8-478b-a424-a86b47a1ba0a/extract-utilities/0.log" Oct 11 05:54:02 crc kubenswrapper[4651]: I1011 05:54:02.361097 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-strx6_60df6e10-9dc8-478b-a424-a86b47a1ba0a/extract-content/0.log" Oct 11 05:54:02 crc kubenswrapper[4651]: I1011 05:54:02.361109 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-strx6_60df6e10-9dc8-478b-a424-a86b47a1ba0a/extract-content/0.log" Oct 11 05:54:02 
crc kubenswrapper[4651]: I1011 05:54:02.383612 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-strx6_60df6e10-9dc8-478b-a424-a86b47a1ba0a/extract-utilities/0.log" Oct 11 05:54:02 crc kubenswrapper[4651]: I1011 05:54:02.544842 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-strx6_60df6e10-9dc8-478b-a424-a86b47a1ba0a/extract-utilities/0.log" Oct 11 05:54:02 crc kubenswrapper[4651]: I1011 05:54:02.604411 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-strx6_60df6e10-9dc8-478b-a424-a86b47a1ba0a/extract-content/0.log" Oct 11 05:54:03 crc kubenswrapper[4651]: I1011 05:54:03.093233 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-strx6_60df6e10-9dc8-478b-a424-a86b47a1ba0a/registry-server/0.log" Oct 11 05:54:04 crc kubenswrapper[4651]: I1011 05:54:04.870336 4651 scope.go:117] "RemoveContainer" containerID="c912b3bc11354e42d7455ad2000403f0825b71426687440ea00315847957d4d3" Oct 11 05:54:04 crc kubenswrapper[4651]: E1011 05:54:04.871400 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:54:16 crc kubenswrapper[4651]: I1011 05:54:16.869934 4651 scope.go:117] "RemoveContainer" containerID="c912b3bc11354e42d7455ad2000403f0825b71426687440ea00315847957d4d3" Oct 11 05:54:16 crc kubenswrapper[4651]: E1011 05:54:16.873138 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:54:29 crc kubenswrapper[4651]: I1011 05:54:29.872837 4651 scope.go:117] "RemoveContainer" containerID="c912b3bc11354e42d7455ad2000403f0825b71426687440ea00315847957d4d3" Oct 11 05:54:29 crc kubenswrapper[4651]: E1011 05:54:29.873689 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:54:35 crc kubenswrapper[4651]: I1011 05:54:35.865080 4651 scope.go:117] "RemoveContainer" containerID="c71921a96e8dd0eb42253cf1d3f78b12f9f624df4ba5adc2a3884b422093d8cb" Oct 11 05:54:35 crc kubenswrapper[4651]: I1011 05:54:35.924170 4651 scope.go:117] "RemoveContainer" containerID="2e22fc57dfaf7c3e38a0a96602587e13eb755845d304cb7a319152f275e3e217" Oct 11 05:54:35 crc kubenswrapper[4651]: I1011 05:54:35.956042 4651 scope.go:117] "RemoveContainer" containerID="93300f0f3a8a397b64a4f628888e2cb3520c7f913297832e85e0998ff66664b5" Oct 11 05:54:38 crc kubenswrapper[4651]: E1011 05:54:38.801797 4651 
upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.182:48492->38.102.83.182:34445: write tcp 38.102.83.182:48492->38.102.83.182:34445: write: broken pipe Oct 11 05:54:42 crc kubenswrapper[4651]: I1011 05:54:42.870652 4651 scope.go:117] "RemoveContainer" containerID="c912b3bc11354e42d7455ad2000403f0825b71426687440ea00315847957d4d3" Oct 11 05:54:42 crc kubenswrapper[4651]: E1011 05:54:42.871464 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:54:54 crc kubenswrapper[4651]: I1011 05:54:54.870506 4651 scope.go:117] "RemoveContainer" containerID="c912b3bc11354e42d7455ad2000403f0825b71426687440ea00315847957d4d3" Oct 11 05:54:54 crc kubenswrapper[4651]: E1011 05:54:54.871746 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:55:06 crc kubenswrapper[4651]: I1011 05:55:06.871332 4651 scope.go:117] "RemoveContainer" containerID="c912b3bc11354e42d7455ad2000403f0825b71426687440ea00315847957d4d3" Oct 11 05:55:06 crc kubenswrapper[4651]: E1011 05:55:06.873125 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:55:17 crc kubenswrapper[4651]: I1011 05:55:17.871726 4651 scope.go:117] "RemoveContainer" containerID="c912b3bc11354e42d7455ad2000403f0825b71426687440ea00315847957d4d3" Oct 11 05:55:17 crc kubenswrapper[4651]: E1011 05:55:17.873305 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:55:32 crc kubenswrapper[4651]: I1011 05:55:32.870412 4651 scope.go:117] "RemoveContainer" containerID="c912b3bc11354e42d7455ad2000403f0825b71426687440ea00315847957d4d3" Oct 11 05:55:32 crc kubenswrapper[4651]: E1011 05:55:32.871167 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 
11 05:55:40 crc kubenswrapper[4651]: I1011 05:55:40.675203 4651 generic.go:334] "Generic (PLEG): container finished" podID="0fcf22ea-ee20-47e0-8840-d6a6a1222773" containerID="dd75cb0888029d805255d7737b3422dc9d853fd599af2fbff6c8e0f07d60dc11" exitCode=0 Oct 11 05:55:40 crc kubenswrapper[4651]: I1011 05:55:40.675256 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-sxmt5/must-gather-chfjv" event={"ID":"0fcf22ea-ee20-47e0-8840-d6a6a1222773","Type":"ContainerDied","Data":"dd75cb0888029d805255d7737b3422dc9d853fd599af2fbff6c8e0f07d60dc11"} Oct 11 05:55:40 crc kubenswrapper[4651]: I1011 05:55:40.676262 4651 scope.go:117] "RemoveContainer" containerID="dd75cb0888029d805255d7737b3422dc9d853fd599af2fbff6c8e0f07d60dc11" Oct 11 05:55:40 crc kubenswrapper[4651]: I1011 05:55:40.920412 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-sxmt5_must-gather-chfjv_0fcf22ea-ee20-47e0-8840-d6a6a1222773/gather/0.log" Oct 11 05:55:46 crc kubenswrapper[4651]: I1011 05:55:46.870017 4651 scope.go:117] "RemoveContainer" containerID="c912b3bc11354e42d7455ad2000403f0825b71426687440ea00315847957d4d3" Oct 11 05:55:46 crc kubenswrapper[4651]: E1011 05:55:46.871026 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:55:48 crc kubenswrapper[4651]: I1011 05:55:48.649386 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-sxmt5/must-gather-chfjv"] Oct 11 05:55:48 crc kubenswrapper[4651]: I1011 05:55:48.650034 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-sxmt5/must-gather-chfjv" podUID="0fcf22ea-ee20-47e0-8840-d6a6a1222773" containerName="copy" containerID="cri-o://afc50b5fe309dbdc7cd726a76123ad20a4b928cd9f5a2fb5d90b035577a082ef" gracePeriod=2 Oct 11 05:55:48 crc kubenswrapper[4651]: I1011 05:55:48.663972 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-sxmt5/must-gather-chfjv"] Oct 11 05:55:49 crc kubenswrapper[4651]: I1011 05:55:49.120331 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-sxmt5_must-gather-chfjv_0fcf22ea-ee20-47e0-8840-d6a6a1222773/copy/0.log" Oct 11 05:55:49 crc kubenswrapper[4651]: I1011 05:55:49.121444 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-sxmt5/must-gather-chfjv" Oct 11 05:55:49 crc kubenswrapper[4651]: I1011 05:55:49.161425 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4kcf\" (UniqueName: \"kubernetes.io/projected/0fcf22ea-ee20-47e0-8840-d6a6a1222773-kube-api-access-w4kcf\") pod \"0fcf22ea-ee20-47e0-8840-d6a6a1222773\" (UID: \"0fcf22ea-ee20-47e0-8840-d6a6a1222773\") " Oct 11 05:55:49 crc kubenswrapper[4651]: I1011 05:55:49.161637 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/0fcf22ea-ee20-47e0-8840-d6a6a1222773-must-gather-output\") pod \"0fcf22ea-ee20-47e0-8840-d6a6a1222773\" (UID: \"0fcf22ea-ee20-47e0-8840-d6a6a1222773\") " Oct 11 05:55:49 crc kubenswrapper[4651]: I1011 05:55:49.168149 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fcf22ea-ee20-47e0-8840-d6a6a1222773-kube-api-access-w4kcf" (OuterVolumeSpecName: "kube-api-access-w4kcf") pod "0fcf22ea-ee20-47e0-8840-d6a6a1222773" (UID: "0fcf22ea-ee20-47e0-8840-d6a6a1222773"). InnerVolumeSpecName "kube-api-access-w4kcf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:55:49 crc kubenswrapper[4651]: I1011 05:55:49.264857 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4kcf\" (UniqueName: \"kubernetes.io/projected/0fcf22ea-ee20-47e0-8840-d6a6a1222773-kube-api-access-w4kcf\") on node \"crc\" DevicePath \"\"" Oct 11 05:55:49 crc kubenswrapper[4651]: I1011 05:55:49.330976 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0fcf22ea-ee20-47e0-8840-d6a6a1222773-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "0fcf22ea-ee20-47e0-8840-d6a6a1222773" (UID: "0fcf22ea-ee20-47e0-8840-d6a6a1222773"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:55:49 crc kubenswrapper[4651]: I1011 05:55:49.366459 4651 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/0fcf22ea-ee20-47e0-8840-d6a6a1222773-must-gather-output\") on node \"crc\" DevicePath \"\"" Oct 11 05:55:49 crc kubenswrapper[4651]: I1011 05:55:49.775592 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-sxmt5_must-gather-chfjv_0fcf22ea-ee20-47e0-8840-d6a6a1222773/copy/0.log" Oct 11 05:55:49 crc kubenswrapper[4651]: I1011 05:55:49.776585 4651 generic.go:334] "Generic (PLEG): container finished" podID="0fcf22ea-ee20-47e0-8840-d6a6a1222773" containerID="afc50b5fe309dbdc7cd726a76123ad20a4b928cd9f5a2fb5d90b035577a082ef" exitCode=143 Oct 11 05:55:49 crc kubenswrapper[4651]: I1011 05:55:49.776642 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-sxmt5/must-gather-chfjv" Oct 11 05:55:49 crc kubenswrapper[4651]: I1011 05:55:49.776645 4651 scope.go:117] "RemoveContainer" containerID="afc50b5fe309dbdc7cd726a76123ad20a4b928cd9f5a2fb5d90b035577a082ef" Oct 11 05:55:49 crc kubenswrapper[4651]: I1011 05:55:49.821041 4651 scope.go:117] "RemoveContainer" containerID="dd75cb0888029d805255d7737b3422dc9d853fd599af2fbff6c8e0f07d60dc11" Oct 11 05:55:49 crc kubenswrapper[4651]: I1011 05:55:49.892991 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0fcf22ea-ee20-47e0-8840-d6a6a1222773" path="/var/lib/kubelet/pods/0fcf22ea-ee20-47e0-8840-d6a6a1222773/volumes" Oct 11 05:55:49 crc kubenswrapper[4651]: I1011 05:55:49.934364 4651 scope.go:117] "RemoveContainer" containerID="afc50b5fe309dbdc7cd726a76123ad20a4b928cd9f5a2fb5d90b035577a082ef" Oct 11 05:55:49 crc kubenswrapper[4651]: E1011 05:55:49.935013 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"afc50b5fe309dbdc7cd726a76123ad20a4b928cd9f5a2fb5d90b035577a082ef\": container with ID starting with afc50b5fe309dbdc7cd726a76123ad20a4b928cd9f5a2fb5d90b035577a082ef not found: ID does not exist" containerID="afc50b5fe309dbdc7cd726a76123ad20a4b928cd9f5a2fb5d90b035577a082ef" Oct 11 05:55:49 crc kubenswrapper[4651]: I1011 05:55:49.935071 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"afc50b5fe309dbdc7cd726a76123ad20a4b928cd9f5a2fb5d90b035577a082ef"} err="failed to get container status \"afc50b5fe309dbdc7cd726a76123ad20a4b928cd9f5a2fb5d90b035577a082ef\": rpc error: code = NotFound desc = could not find container \"afc50b5fe309dbdc7cd726a76123ad20a4b928cd9f5a2fb5d90b035577a082ef\": container with ID starting with afc50b5fe309dbdc7cd726a76123ad20a4b928cd9f5a2fb5d90b035577a082ef not found: ID does not exist" Oct 11 05:55:49 crc kubenswrapper[4651]: I1011 05:55:49.935107 4651 scope.go:117] "RemoveContainer" containerID="dd75cb0888029d805255d7737b3422dc9d853fd599af2fbff6c8e0f07d60dc11" Oct 11 05:55:49 crc kubenswrapper[4651]: E1011 05:55:49.935591 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd75cb0888029d805255d7737b3422dc9d853fd599af2fbff6c8e0f07d60dc11\": container with ID starting with dd75cb0888029d805255d7737b3422dc9d853fd599af2fbff6c8e0f07d60dc11 not found: ID does not exist" containerID="dd75cb0888029d805255d7737b3422dc9d853fd599af2fbff6c8e0f07d60dc11" Oct 11 05:55:49 crc kubenswrapper[4651]: I1011 05:55:49.935662 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd75cb0888029d805255d7737b3422dc9d853fd599af2fbff6c8e0f07d60dc11"} err="failed to get container status \"dd75cb0888029d805255d7737b3422dc9d853fd599af2fbff6c8e0f07d60dc11\": rpc error: code = NotFound desc = could not find container \"dd75cb0888029d805255d7737b3422dc9d853fd599af2fbff6c8e0f07d60dc11\": container with ID starting with dd75cb0888029d805255d7737b3422dc9d853fd599af2fbff6c8e0f07d60dc11 not found: ID does not exist" Oct 11 05:55:57 crc kubenswrapper[4651]: I1011 05:55:57.870887 4651 scope.go:117] "RemoveContainer" containerID="c912b3bc11354e42d7455ad2000403f0825b71426687440ea00315847957d4d3" Oct 11 05:55:57 crc kubenswrapper[4651]: E1011 05:55:57.872070 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 
5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:56:12 crc kubenswrapper[4651]: I1011 05:56:12.870187 4651 scope.go:117] "RemoveContainer" containerID="c912b3bc11354e42d7455ad2000403f0825b71426687440ea00315847957d4d3" Oct 11 05:56:12 crc kubenswrapper[4651]: E1011 05:56:12.871638 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:56:24 crc kubenswrapper[4651]: I1011 05:56:24.873197 4651 scope.go:117] "RemoveContainer" containerID="c912b3bc11354e42d7455ad2000403f0825b71426687440ea00315847957d4d3" Oct 11 05:56:24 crc kubenswrapper[4651]: E1011 05:56:24.874481 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:56:28 crc kubenswrapper[4651]: I1011 05:56:28.600445 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-gr7gt/must-gather-k45rb"] Oct 11 05:56:28 crc kubenswrapper[4651]: E1011 05:56:28.601451 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fcf22ea-ee20-47e0-8840-d6a6a1222773" containerName="copy" Oct 11 05:56:28 crc kubenswrapper[4651]: I1011 05:56:28.601471 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fcf22ea-ee20-47e0-8840-d6a6a1222773" containerName="copy" Oct 11 05:56:28 crc kubenswrapper[4651]: E1011 05:56:28.601480 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23f28431-7e50-45ef-b823-61cbbb58dddf" containerName="container-00" Oct 11 05:56:28 crc kubenswrapper[4651]: I1011 05:56:28.601490 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="23f28431-7e50-45ef-b823-61cbbb58dddf" containerName="container-00" Oct 11 05:56:28 crc kubenswrapper[4651]: E1011 05:56:28.601525 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fcf22ea-ee20-47e0-8840-d6a6a1222773" containerName="gather" Oct 11 05:56:28 crc kubenswrapper[4651]: I1011 05:56:28.601534 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fcf22ea-ee20-47e0-8840-d6a6a1222773" containerName="gather" Oct 11 05:56:28 crc kubenswrapper[4651]: I1011 05:56:28.601785 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fcf22ea-ee20-47e0-8840-d6a6a1222773" containerName="gather" Oct 11 05:56:28 crc kubenswrapper[4651]: I1011 05:56:28.601812 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="23f28431-7e50-45ef-b823-61cbbb58dddf" containerName="container-00" Oct 11 05:56:28 crc kubenswrapper[4651]: I1011 05:56:28.601837 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fcf22ea-ee20-47e0-8840-d6a6a1222773" containerName="copy" Oct 11 05:56:28 crc 
kubenswrapper[4651]: I1011 05:56:28.603043 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-gr7gt/must-gather-k45rb" Oct 11 05:56:28 crc kubenswrapper[4651]: I1011 05:56:28.606790 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-gr7gt"/"default-dockercfg-55mv7" Oct 11 05:56:28 crc kubenswrapper[4651]: I1011 05:56:28.607008 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-gr7gt"/"openshift-service-ca.crt" Oct 11 05:56:28 crc kubenswrapper[4651]: I1011 05:56:28.607066 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-gr7gt"/"kube-root-ca.crt" Oct 11 05:56:28 crc kubenswrapper[4651]: I1011 05:56:28.630055 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-gr7gt/must-gather-k45rb"] Oct 11 05:56:28 crc kubenswrapper[4651]: I1011 05:56:28.747037 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5e95a87d-f2af-4b16-addc-86141bbe88ed-must-gather-output\") pod \"must-gather-k45rb\" (UID: \"5e95a87d-f2af-4b16-addc-86141bbe88ed\") " pod="openshift-must-gather-gr7gt/must-gather-k45rb" Oct 11 05:56:28 crc kubenswrapper[4651]: I1011 05:56:28.747447 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9tpf\" (UniqueName: \"kubernetes.io/projected/5e95a87d-f2af-4b16-addc-86141bbe88ed-kube-api-access-z9tpf\") pod \"must-gather-k45rb\" (UID: \"5e95a87d-f2af-4b16-addc-86141bbe88ed\") " pod="openshift-must-gather-gr7gt/must-gather-k45rb" Oct 11 05:56:28 crc kubenswrapper[4651]: I1011 05:56:28.849363 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5e95a87d-f2af-4b16-addc-86141bbe88ed-must-gather-output\") pod \"must-gather-k45rb\" (UID: \"5e95a87d-f2af-4b16-addc-86141bbe88ed\") " pod="openshift-must-gather-gr7gt/must-gather-k45rb" Oct 11 05:56:28 crc kubenswrapper[4651]: I1011 05:56:28.849408 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9tpf\" (UniqueName: \"kubernetes.io/projected/5e95a87d-f2af-4b16-addc-86141bbe88ed-kube-api-access-z9tpf\") pod \"must-gather-k45rb\" (UID: \"5e95a87d-f2af-4b16-addc-86141bbe88ed\") " pod="openshift-must-gather-gr7gt/must-gather-k45rb" Oct 11 05:56:28 crc kubenswrapper[4651]: I1011 05:56:28.850140 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5e95a87d-f2af-4b16-addc-86141bbe88ed-must-gather-output\") pod \"must-gather-k45rb\" (UID: \"5e95a87d-f2af-4b16-addc-86141bbe88ed\") " pod="openshift-must-gather-gr7gt/must-gather-k45rb" Oct 11 05:56:28 crc kubenswrapper[4651]: I1011 05:56:28.895441 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9tpf\" (UniqueName: \"kubernetes.io/projected/5e95a87d-f2af-4b16-addc-86141bbe88ed-kube-api-access-z9tpf\") pod \"must-gather-k45rb\" (UID: \"5e95a87d-f2af-4b16-addc-86141bbe88ed\") " pod="openshift-must-gather-gr7gt/must-gather-k45rb" Oct 11 05:56:28 crc kubenswrapper[4651]: I1011 05:56:28.938359 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gr7gt/must-gather-k45rb" Oct 11 05:56:29 crc kubenswrapper[4651]: I1011 05:56:29.395914 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-gr7gt/must-gather-k45rb"] Oct 11 05:56:30 crc kubenswrapper[4651]: I1011 05:56:30.261229 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gr7gt/must-gather-k45rb" event={"ID":"5e95a87d-f2af-4b16-addc-86141bbe88ed","Type":"ContainerStarted","Data":"6dac6a6407628b53bc50c5f46f84ab568d4488bb21284fc8df08db35b91dd4e9"} Oct 11 05:56:30 crc kubenswrapper[4651]: I1011 05:56:30.262132 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gr7gt/must-gather-k45rb" event={"ID":"5e95a87d-f2af-4b16-addc-86141bbe88ed","Type":"ContainerStarted","Data":"d1cd6cf38c4796790c1d7566c9bd7002cdabb3f83454589783359875d8e4b641"} Oct 11 05:56:30 crc kubenswrapper[4651]: I1011 05:56:30.262156 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gr7gt/must-gather-k45rb" event={"ID":"5e95a87d-f2af-4b16-addc-86141bbe88ed","Type":"ContainerStarted","Data":"b2623935bb5bebcd500ee2f360ffaf8939ce8b43ac00e45f39deabfcbffe771a"} Oct 11 05:56:32 crc kubenswrapper[4651]: I1011 05:56:32.392143 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-gr7gt/must-gather-k45rb" podStartSLOduration=4.392112828 podStartE2EDuration="4.392112828s" podCreationTimestamp="2025-10-11 05:56:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:56:30.282995982 +0000 UTC m=+3911.179228808" watchObservedRunningTime="2025-10-11 05:56:32.392112828 +0000 UTC m=+3913.288345624" Oct 11 05:56:32 crc kubenswrapper[4651]: I1011 05:56:32.397057 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vm7v6"] Oct 11 05:56:32 crc kubenswrapper[4651]: I1011 05:56:32.402338 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vm7v6" Oct 11 05:56:32 crc kubenswrapper[4651]: I1011 05:56:32.414623 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vm7v6"] Oct 11 05:56:32 crc kubenswrapper[4651]: I1011 05:56:32.542885 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b3ba716-9aa8-4cd4-baf5-d6122b10c057-utilities\") pod \"redhat-marketplace-vm7v6\" (UID: \"4b3ba716-9aa8-4cd4-baf5-d6122b10c057\") " pod="openshift-marketplace/redhat-marketplace-vm7v6" Oct 11 05:56:32 crc kubenswrapper[4651]: I1011 05:56:32.543069 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b3ba716-9aa8-4cd4-baf5-d6122b10c057-catalog-content\") pod \"redhat-marketplace-vm7v6\" (UID: \"4b3ba716-9aa8-4cd4-baf5-d6122b10c057\") " pod="openshift-marketplace/redhat-marketplace-vm7v6" Oct 11 05:56:32 crc kubenswrapper[4651]: I1011 05:56:32.543099 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdmls\" (UniqueName: \"kubernetes.io/projected/4b3ba716-9aa8-4cd4-baf5-d6122b10c057-kube-api-access-gdmls\") pod \"redhat-marketplace-vm7v6\" (UID: \"4b3ba716-9aa8-4cd4-baf5-d6122b10c057\") " pod="openshift-marketplace/redhat-marketplace-vm7v6" Oct 11 05:56:32 crc kubenswrapper[4651]: I1011 05:56:32.645057 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdmls\" (UniqueName: \"kubernetes.io/projected/4b3ba716-9aa8-4cd4-baf5-d6122b10c057-kube-api-access-gdmls\") pod \"redhat-marketplace-vm7v6\" (UID: \"4b3ba716-9aa8-4cd4-baf5-d6122b10c057\") " pod="openshift-marketplace/redhat-marketplace-vm7v6" Oct 11 05:56:32 crc kubenswrapper[4651]: I1011 05:56:32.645159 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b3ba716-9aa8-4cd4-baf5-d6122b10c057-utilities\") pod \"redhat-marketplace-vm7v6\" (UID: \"4b3ba716-9aa8-4cd4-baf5-d6122b10c057\") " pod="openshift-marketplace/redhat-marketplace-vm7v6" Oct 11 05:56:32 crc kubenswrapper[4651]: I1011 05:56:32.645357 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b3ba716-9aa8-4cd4-baf5-d6122b10c057-catalog-content\") pod \"redhat-marketplace-vm7v6\" (UID: \"4b3ba716-9aa8-4cd4-baf5-d6122b10c057\") " pod="openshift-marketplace/redhat-marketplace-vm7v6" Oct 11 05:56:32 crc kubenswrapper[4651]: I1011 05:56:32.645844 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b3ba716-9aa8-4cd4-baf5-d6122b10c057-utilities\") pod \"redhat-marketplace-vm7v6\" (UID: \"4b3ba716-9aa8-4cd4-baf5-d6122b10c057\") " pod="openshift-marketplace/redhat-marketplace-vm7v6" Oct 11 05:56:32 crc kubenswrapper[4651]: I1011 05:56:32.645900 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b3ba716-9aa8-4cd4-baf5-d6122b10c057-catalog-content\") pod \"redhat-marketplace-vm7v6\" (UID: \"4b3ba716-9aa8-4cd4-baf5-d6122b10c057\") " pod="openshift-marketplace/redhat-marketplace-vm7v6" Oct 11 05:56:32 crc kubenswrapper[4651]: I1011 05:56:32.666422 4651 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-gdmls\" (UniqueName: \"kubernetes.io/projected/4b3ba716-9aa8-4cd4-baf5-d6122b10c057-kube-api-access-gdmls\") pod \"redhat-marketplace-vm7v6\" (UID: \"4b3ba716-9aa8-4cd4-baf5-d6122b10c057\") " pod="openshift-marketplace/redhat-marketplace-vm7v6" Oct 11 05:56:32 crc kubenswrapper[4651]: I1011 05:56:32.733162 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vm7v6" Oct 11 05:56:33 crc kubenswrapper[4651]: W1011 05:56:33.257176 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4b3ba716_9aa8_4cd4_baf5_d6122b10c057.slice/crio-98beb9e90ef7264c1d639432126340a9ac9b72551fee3255f47c546472fa633a WatchSource:0}: Error finding container 98beb9e90ef7264c1d639432126340a9ac9b72551fee3255f47c546472fa633a: Status 404 returned error can't find the container with id 98beb9e90ef7264c1d639432126340a9ac9b72551fee3255f47c546472fa633a Oct 11 05:56:33 crc kubenswrapper[4651]: I1011 05:56:33.257605 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vm7v6"] Oct 11 05:56:33 crc kubenswrapper[4651]: I1011 05:56:33.294468 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vm7v6" event={"ID":"4b3ba716-9aa8-4cd4-baf5-d6122b10c057","Type":"ContainerStarted","Data":"98beb9e90ef7264c1d639432126340a9ac9b72551fee3255f47c546472fa633a"} Oct 11 05:56:33 crc kubenswrapper[4651]: I1011 05:56:33.978914 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-gr7gt/crc-debug-gv7nw"] Oct 11 05:56:33 crc kubenswrapper[4651]: I1011 05:56:33.982248 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gr7gt/crc-debug-gv7nw" Oct 11 05:56:34 crc kubenswrapper[4651]: I1011 05:56:34.083609 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/51e2994c-3647-4536-b336-f0fc8d5e77eb-host\") pod \"crc-debug-gv7nw\" (UID: \"51e2994c-3647-4536-b336-f0fc8d5e77eb\") " pod="openshift-must-gather-gr7gt/crc-debug-gv7nw" Oct 11 05:56:34 crc kubenswrapper[4651]: I1011 05:56:34.083860 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84lhb\" (UniqueName: \"kubernetes.io/projected/51e2994c-3647-4536-b336-f0fc8d5e77eb-kube-api-access-84lhb\") pod \"crc-debug-gv7nw\" (UID: \"51e2994c-3647-4536-b336-f0fc8d5e77eb\") " pod="openshift-must-gather-gr7gt/crc-debug-gv7nw" Oct 11 05:56:34 crc kubenswrapper[4651]: I1011 05:56:34.186721 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84lhb\" (UniqueName: \"kubernetes.io/projected/51e2994c-3647-4536-b336-f0fc8d5e77eb-kube-api-access-84lhb\") pod \"crc-debug-gv7nw\" (UID: \"51e2994c-3647-4536-b336-f0fc8d5e77eb\") " pod="openshift-must-gather-gr7gt/crc-debug-gv7nw" Oct 11 05:56:34 crc kubenswrapper[4651]: I1011 05:56:34.186842 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/51e2994c-3647-4536-b336-f0fc8d5e77eb-host\") pod \"crc-debug-gv7nw\" (UID: \"51e2994c-3647-4536-b336-f0fc8d5e77eb\") " pod="openshift-must-gather-gr7gt/crc-debug-gv7nw" Oct 11 05:56:34 crc kubenswrapper[4651]: I1011 05:56:34.186988 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/51e2994c-3647-4536-b336-f0fc8d5e77eb-host\") pod \"crc-debug-gv7nw\" (UID: \"51e2994c-3647-4536-b336-f0fc8d5e77eb\") " pod="openshift-must-gather-gr7gt/crc-debug-gv7nw" Oct 11 05:56:34 crc kubenswrapper[4651]: I1011 05:56:34.214027 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84lhb\" (UniqueName: \"kubernetes.io/projected/51e2994c-3647-4536-b336-f0fc8d5e77eb-kube-api-access-84lhb\") pod \"crc-debug-gv7nw\" (UID: \"51e2994c-3647-4536-b336-f0fc8d5e77eb\") " pod="openshift-must-gather-gr7gt/crc-debug-gv7nw" Oct 11 05:56:34 crc kubenswrapper[4651]: I1011 05:56:34.298677 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gr7gt/crc-debug-gv7nw" Oct 11 05:56:34 crc kubenswrapper[4651]: I1011 05:56:34.329558 4651 generic.go:334] "Generic (PLEG): container finished" podID="4b3ba716-9aa8-4cd4-baf5-d6122b10c057" containerID="9e8b2c7ea0431fbecb93a6e3e9c4ec4550bc6c6862d9de009b591d07d4164a48" exitCode=0 Oct 11 05:56:34 crc kubenswrapper[4651]: I1011 05:56:34.329634 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vm7v6" event={"ID":"4b3ba716-9aa8-4cd4-baf5-d6122b10c057","Type":"ContainerDied","Data":"9e8b2c7ea0431fbecb93a6e3e9c4ec4550bc6c6862d9de009b591d07d4164a48"} Oct 11 05:56:34 crc kubenswrapper[4651]: I1011 05:56:34.347557 4651 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 11 05:56:35 crc kubenswrapper[4651]: I1011 05:56:35.341962 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gr7gt/crc-debug-gv7nw" event={"ID":"51e2994c-3647-4536-b336-f0fc8d5e77eb","Type":"ContainerStarted","Data":"958dd711c196508dd4e4f6fcdc9da32f5e9e4c496234e5fb0faad261d96368a3"} Oct 11 05:56:35 crc kubenswrapper[4651]: I1011 05:56:35.342707 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gr7gt/crc-debug-gv7nw" event={"ID":"51e2994c-3647-4536-b336-f0fc8d5e77eb","Type":"ContainerStarted","Data":"78348c709dd4e1436a3826d400dee97d67ed7a3b981cab3c693846a788c5dd2c"} Oct 11 05:56:35 crc kubenswrapper[4651]: I1011 05:56:35.347906 4651 generic.go:334] "Generic (PLEG): container finished" podID="4b3ba716-9aa8-4cd4-baf5-d6122b10c057" containerID="fc5dc225c9d0f81d45dd5d987f8b64bfe0be0468677eed404b5a6e2a5bd4b66b" exitCode=0 Oct 11 05:56:35 crc kubenswrapper[4651]: I1011 05:56:35.347952 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vm7v6" event={"ID":"4b3ba716-9aa8-4cd4-baf5-d6122b10c057","Type":"ContainerDied","Data":"fc5dc225c9d0f81d45dd5d987f8b64bfe0be0468677eed404b5a6e2a5bd4b66b"} Oct 11 05:56:35 crc kubenswrapper[4651]: I1011 05:56:35.358491 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-gr7gt/crc-debug-gv7nw" podStartSLOduration=2.358470411 podStartE2EDuration="2.358470411s" podCreationTimestamp="2025-10-11 05:56:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:56:35.356943861 +0000 UTC m=+3916.253176657" watchObservedRunningTime="2025-10-11 05:56:35.358470411 +0000 UTC m=+3916.254703207" Oct 11 05:56:36 crc kubenswrapper[4651]: I1011 05:56:36.362371 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vm7v6" event={"ID":"4b3ba716-9aa8-4cd4-baf5-d6122b10c057","Type":"ContainerStarted","Data":"cb749176f336c851126f4551f11cbacf0e88dbfae1cc80c50d69b24a60052128"} Oct 11 05:56:36 crc kubenswrapper[4651]: I1011 05:56:36.384579 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vm7v6" podStartSLOduration=2.999738925 podStartE2EDuration="4.384562194s" podCreationTimestamp="2025-10-11 05:56:32 +0000 UTC" firstStartedPulling="2025-10-11 05:56:34.347069236 +0000 UTC m=+3915.243302032" lastFinishedPulling="2025-10-11 05:56:35.731892485 +0000 UTC m=+3916.628125301" observedRunningTime="2025-10-11 05:56:36.380374676 +0000 UTC m=+3917.276607492" watchObservedRunningTime="2025-10-11 05:56:36.384562194 +0000 UTC 
m=+3917.280794980" Oct 11 05:56:37 crc kubenswrapper[4651]: I1011 05:56:37.869390 4651 scope.go:117] "RemoveContainer" containerID="c912b3bc11354e42d7455ad2000403f0825b71426687440ea00315847957d4d3" Oct 11 05:56:37 crc kubenswrapper[4651]: E1011 05:56:37.870266 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 05:56:42 crc kubenswrapper[4651]: I1011 05:56:42.733681 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vm7v6" Oct 11 05:56:42 crc kubenswrapper[4651]: I1011 05:56:42.734559 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vm7v6" Oct 11 05:56:42 crc kubenswrapper[4651]: I1011 05:56:42.790853 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vm7v6" Oct 11 05:56:43 crc kubenswrapper[4651]: I1011 05:56:43.466431 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vm7v6" Oct 11 05:56:43 crc kubenswrapper[4651]: I1011 05:56:43.515785 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vm7v6"] Oct 11 05:56:45 crc kubenswrapper[4651]: I1011 05:56:45.434677 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vm7v6" podUID="4b3ba716-9aa8-4cd4-baf5-d6122b10c057" containerName="registry-server" containerID="cri-o://cb749176f336c851126f4551f11cbacf0e88dbfae1cc80c50d69b24a60052128" gracePeriod=2 Oct 11 05:56:45 crc kubenswrapper[4651]: I1011 05:56:45.940176 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vm7v6" Oct 11 05:56:46 crc kubenswrapper[4651]: I1011 05:56:46.072207 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b3ba716-9aa8-4cd4-baf5-d6122b10c057-catalog-content\") pod \"4b3ba716-9aa8-4cd4-baf5-d6122b10c057\" (UID: \"4b3ba716-9aa8-4cd4-baf5-d6122b10c057\") " Oct 11 05:56:46 crc kubenswrapper[4651]: I1011 05:56:46.072273 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gdmls\" (UniqueName: \"kubernetes.io/projected/4b3ba716-9aa8-4cd4-baf5-d6122b10c057-kube-api-access-gdmls\") pod \"4b3ba716-9aa8-4cd4-baf5-d6122b10c057\" (UID: \"4b3ba716-9aa8-4cd4-baf5-d6122b10c057\") " Oct 11 05:56:46 crc kubenswrapper[4651]: I1011 05:56:46.072407 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b3ba716-9aa8-4cd4-baf5-d6122b10c057-utilities\") pod \"4b3ba716-9aa8-4cd4-baf5-d6122b10c057\" (UID: \"4b3ba716-9aa8-4cd4-baf5-d6122b10c057\") " Oct 11 05:56:46 crc kubenswrapper[4651]: I1011 05:56:46.073344 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b3ba716-9aa8-4cd4-baf5-d6122b10c057-utilities" (OuterVolumeSpecName: "utilities") pod "4b3ba716-9aa8-4cd4-baf5-d6122b10c057" (UID: "4b3ba716-9aa8-4cd4-baf5-d6122b10c057"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:56:46 crc kubenswrapper[4651]: I1011 05:56:46.077344 4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b3ba716-9aa8-4cd4-baf5-d6122b10c057-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 05:56:46 crc kubenswrapper[4651]: I1011 05:56:46.085381 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b3ba716-9aa8-4cd4-baf5-d6122b10c057-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4b3ba716-9aa8-4cd4-baf5-d6122b10c057" (UID: "4b3ba716-9aa8-4cd4-baf5-d6122b10c057"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 05:56:46 crc kubenswrapper[4651]: I1011 05:56:46.107017 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b3ba716-9aa8-4cd4-baf5-d6122b10c057-kube-api-access-gdmls" (OuterVolumeSpecName: "kube-api-access-gdmls") pod "4b3ba716-9aa8-4cd4-baf5-d6122b10c057" (UID: "4b3ba716-9aa8-4cd4-baf5-d6122b10c057"). InnerVolumeSpecName "kube-api-access-gdmls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:56:46 crc kubenswrapper[4651]: I1011 05:56:46.179096 4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b3ba716-9aa8-4cd4-baf5-d6122b10c057-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 05:56:46 crc kubenswrapper[4651]: I1011 05:56:46.179152 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gdmls\" (UniqueName: \"kubernetes.io/projected/4b3ba716-9aa8-4cd4-baf5-d6122b10c057-kube-api-access-gdmls\") on node \"crc\" DevicePath \"\"" Oct 11 05:56:46 crc kubenswrapper[4651]: I1011 05:56:46.446688 4651 generic.go:334] "Generic (PLEG): container finished" podID="4b3ba716-9aa8-4cd4-baf5-d6122b10c057" containerID="cb749176f336c851126f4551f11cbacf0e88dbfae1cc80c50d69b24a60052128" exitCode=0 Oct 11 05:56:46 crc kubenswrapper[4651]: I1011 05:56:46.446751 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vm7v6" event={"ID":"4b3ba716-9aa8-4cd4-baf5-d6122b10c057","Type":"ContainerDied","Data":"cb749176f336c851126f4551f11cbacf0e88dbfae1cc80c50d69b24a60052128"} Oct 11 05:56:46 crc kubenswrapper[4651]: I1011 05:56:46.446790 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vm7v6" event={"ID":"4b3ba716-9aa8-4cd4-baf5-d6122b10c057","Type":"ContainerDied","Data":"98beb9e90ef7264c1d639432126340a9ac9b72551fee3255f47c546472fa633a"} Oct 11 05:56:46 crc kubenswrapper[4651]: I1011 05:56:46.446838 4651 scope.go:117] "RemoveContainer" containerID="cb749176f336c851126f4551f11cbacf0e88dbfae1cc80c50d69b24a60052128" Oct 11 05:56:46 crc kubenswrapper[4651]: I1011 05:56:46.446863 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vm7v6" Oct 11 05:56:46 crc kubenswrapper[4651]: I1011 05:56:46.466257 4651 scope.go:117] "RemoveContainer" containerID="fc5dc225c9d0f81d45dd5d987f8b64bfe0be0468677eed404b5a6e2a5bd4b66b" Oct 11 05:56:46 crc kubenswrapper[4651]: I1011 05:56:46.500835 4651 scope.go:117] "RemoveContainer" containerID="9e8b2c7ea0431fbecb93a6e3e9c4ec4550bc6c6862d9de009b591d07d4164a48" Oct 11 05:56:46 crc kubenswrapper[4651]: I1011 05:56:46.524868 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vm7v6"] Oct 11 05:56:46 crc kubenswrapper[4651]: I1011 05:56:46.543486 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vm7v6"] Oct 11 05:56:46 crc kubenswrapper[4651]: I1011 05:56:46.563380 4651 scope.go:117] "RemoveContainer" containerID="cb749176f336c851126f4551f11cbacf0e88dbfae1cc80c50d69b24a60052128" Oct 11 05:56:46 crc kubenswrapper[4651]: E1011 05:56:46.563780 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb749176f336c851126f4551f11cbacf0e88dbfae1cc80c50d69b24a60052128\": container with ID starting with cb749176f336c851126f4551f11cbacf0e88dbfae1cc80c50d69b24a60052128 not found: ID does not exist" containerID="cb749176f336c851126f4551f11cbacf0e88dbfae1cc80c50d69b24a60052128" Oct 11 05:56:46 crc kubenswrapper[4651]: I1011 05:56:46.563859 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb749176f336c851126f4551f11cbacf0e88dbfae1cc80c50d69b24a60052128"} err="failed to get container status \"cb749176f336c851126f4551f11cbacf0e88dbfae1cc80c50d69b24a60052128\": rpc error: code = NotFound desc = could not find container \"cb749176f336c851126f4551f11cbacf0e88dbfae1cc80c50d69b24a60052128\": container with ID starting with cb749176f336c851126f4551f11cbacf0e88dbfae1cc80c50d69b24a60052128 not found: ID does not exist" Oct 11 05:56:46 crc kubenswrapper[4651]: I1011 05:56:46.563890 4651 scope.go:117] "RemoveContainer" containerID="fc5dc225c9d0f81d45dd5d987f8b64bfe0be0468677eed404b5a6e2a5bd4b66b" Oct 11 05:56:46 crc kubenswrapper[4651]: E1011 05:56:46.564177 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc5dc225c9d0f81d45dd5d987f8b64bfe0be0468677eed404b5a6e2a5bd4b66b\": container with ID starting with fc5dc225c9d0f81d45dd5d987f8b64bfe0be0468677eed404b5a6e2a5bd4b66b not found: ID does not exist" containerID="fc5dc225c9d0f81d45dd5d987f8b64bfe0be0468677eed404b5a6e2a5bd4b66b" Oct 11 05:56:46 crc kubenswrapper[4651]: I1011 05:56:46.564234 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc5dc225c9d0f81d45dd5d987f8b64bfe0be0468677eed404b5a6e2a5bd4b66b"} err="failed to get container status \"fc5dc225c9d0f81d45dd5d987f8b64bfe0be0468677eed404b5a6e2a5bd4b66b\": rpc error: code = NotFound desc = could not find container \"fc5dc225c9d0f81d45dd5d987f8b64bfe0be0468677eed404b5a6e2a5bd4b66b\": container with ID starting with fc5dc225c9d0f81d45dd5d987f8b64bfe0be0468677eed404b5a6e2a5bd4b66b not found: ID does not exist" Oct 11 05:56:46 crc kubenswrapper[4651]: I1011 05:56:46.564263 4651 scope.go:117] "RemoveContainer" containerID="9e8b2c7ea0431fbecb93a6e3e9c4ec4550bc6c6862d9de009b591d07d4164a48" Oct 11 05:56:46 crc kubenswrapper[4651]: E1011 05:56:46.564544 4651 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"9e8b2c7ea0431fbecb93a6e3e9c4ec4550bc6c6862d9de009b591d07d4164a48\": container with ID starting with 9e8b2c7ea0431fbecb93a6e3e9c4ec4550bc6c6862d9de009b591d07d4164a48 not found: ID does not exist" containerID="9e8b2c7ea0431fbecb93a6e3e9c4ec4550bc6c6862d9de009b591d07d4164a48" Oct 11 05:56:46 crc kubenswrapper[4651]: I1011 05:56:46.564579 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e8b2c7ea0431fbecb93a6e3e9c4ec4550bc6c6862d9de009b591d07d4164a48"} err="failed to get container status \"9e8b2c7ea0431fbecb93a6e3e9c4ec4550bc6c6862d9de009b591d07d4164a48\": rpc error: code = NotFound desc = could not find container \"9e8b2c7ea0431fbecb93a6e3e9c4ec4550bc6c6862d9de009b591d07d4164a48\": container with ID starting with 9e8b2c7ea0431fbecb93a6e3e9c4ec4550bc6c6862d9de009b591d07d4164a48 not found: ID does not exist" Oct 11 05:56:47 crc kubenswrapper[4651]: I1011 05:56:47.879313 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b3ba716-9aa8-4cd4-baf5-d6122b10c057" path="/var/lib/kubelet/pods/4b3ba716-9aa8-4cd4-baf5-d6122b10c057/volumes" Oct 11 05:56:51 crc kubenswrapper[4651]: I1011 05:56:51.869400 4651 scope.go:117] "RemoveContainer" containerID="c912b3bc11354e42d7455ad2000403f0825b71426687440ea00315847957d4d3" Oct 11 05:56:52 crc kubenswrapper[4651]: I1011 05:56:52.499268 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" event={"ID":"519a1ae1-e964-48b0-8b61-835146df28c1","Type":"ContainerStarted","Data":"138046dd6ace80fbeb2ae1e92c9cc244c02ae0ec463960b56cf19a59f5eeaf98"} Oct 11 05:57:08 crc kubenswrapper[4651]: I1011 05:57:08.667555 4651 generic.go:334] "Generic (PLEG): container finished" podID="51e2994c-3647-4536-b336-f0fc8d5e77eb" containerID="958dd711c196508dd4e4f6fcdc9da32f5e9e4c496234e5fb0faad261d96368a3" exitCode=0 Oct 11 05:57:08 crc kubenswrapper[4651]: I1011 05:57:08.667679 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gr7gt/crc-debug-gv7nw" event={"ID":"51e2994c-3647-4536-b336-f0fc8d5e77eb","Type":"ContainerDied","Data":"958dd711c196508dd4e4f6fcdc9da32f5e9e4c496234e5fb0faad261d96368a3"} Oct 11 05:57:09 crc kubenswrapper[4651]: I1011 05:57:09.778869 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gr7gt/crc-debug-gv7nw" Oct 11 05:57:09 crc kubenswrapper[4651]: I1011 05:57:09.824209 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-gr7gt/crc-debug-gv7nw"] Oct 11 05:57:09 crc kubenswrapper[4651]: I1011 05:57:09.830096 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-gr7gt/crc-debug-gv7nw"] Oct 11 05:57:09 crc kubenswrapper[4651]: I1011 05:57:09.923431 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/51e2994c-3647-4536-b336-f0fc8d5e77eb-host\") pod \"51e2994c-3647-4536-b336-f0fc8d5e77eb\" (UID: \"51e2994c-3647-4536-b336-f0fc8d5e77eb\") " Oct 11 05:57:09 crc kubenswrapper[4651]: I1011 05:57:09.923864 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-84lhb\" (UniqueName: \"kubernetes.io/projected/51e2994c-3647-4536-b336-f0fc8d5e77eb-kube-api-access-84lhb\") pod \"51e2994c-3647-4536-b336-f0fc8d5e77eb\" (UID: \"51e2994c-3647-4536-b336-f0fc8d5e77eb\") " Oct 11 05:57:09 crc kubenswrapper[4651]: I1011 05:57:09.923541 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/51e2994c-3647-4536-b336-f0fc8d5e77eb-host" (OuterVolumeSpecName: "host") pod "51e2994c-3647-4536-b336-f0fc8d5e77eb" (UID: "51e2994c-3647-4536-b336-f0fc8d5e77eb"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 05:57:09 crc kubenswrapper[4651]: I1011 05:57:09.936998 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51e2994c-3647-4536-b336-f0fc8d5e77eb-kube-api-access-84lhb" (OuterVolumeSpecName: "kube-api-access-84lhb") pod "51e2994c-3647-4536-b336-f0fc8d5e77eb" (UID: "51e2994c-3647-4536-b336-f0fc8d5e77eb"). InnerVolumeSpecName "kube-api-access-84lhb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:57:10 crc kubenswrapper[4651]: I1011 05:57:10.029371 4651 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/51e2994c-3647-4536-b336-f0fc8d5e77eb-host\") on node \"crc\" DevicePath \"\"" Oct 11 05:57:10 crc kubenswrapper[4651]: I1011 05:57:10.029408 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-84lhb\" (UniqueName: \"kubernetes.io/projected/51e2994c-3647-4536-b336-f0fc8d5e77eb-kube-api-access-84lhb\") on node \"crc\" DevicePath \"\"" Oct 11 05:57:10 crc kubenswrapper[4651]: I1011 05:57:10.687467 4651 scope.go:117] "RemoveContainer" containerID="958dd711c196508dd4e4f6fcdc9da32f5e9e4c496234e5fb0faad261d96368a3" Oct 11 05:57:10 crc kubenswrapper[4651]: I1011 05:57:10.687502 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gr7gt/crc-debug-gv7nw" Oct 11 05:57:11 crc kubenswrapper[4651]: I1011 05:57:11.023290 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-gr7gt/crc-debug-6897c"] Oct 11 05:57:11 crc kubenswrapper[4651]: E1011 05:57:11.024245 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b3ba716-9aa8-4cd4-baf5-d6122b10c057" containerName="extract-utilities" Oct 11 05:57:11 crc kubenswrapper[4651]: I1011 05:57:11.024276 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b3ba716-9aa8-4cd4-baf5-d6122b10c057" containerName="extract-utilities" Oct 11 05:57:11 crc kubenswrapper[4651]: E1011 05:57:11.024309 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51e2994c-3647-4536-b336-f0fc8d5e77eb" containerName="container-00" Oct 11 05:57:11 crc kubenswrapper[4651]: I1011 05:57:11.024321 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="51e2994c-3647-4536-b336-f0fc8d5e77eb" containerName="container-00" Oct 11 05:57:11 crc kubenswrapper[4651]: E1011 05:57:11.024369 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b3ba716-9aa8-4cd4-baf5-d6122b10c057" containerName="extract-content" Oct 11 05:57:11 crc kubenswrapper[4651]: I1011 05:57:11.024381 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b3ba716-9aa8-4cd4-baf5-d6122b10c057" containerName="extract-content" Oct 11 05:57:11 crc kubenswrapper[4651]: E1011 05:57:11.024474 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b3ba716-9aa8-4cd4-baf5-d6122b10c057" containerName="registry-server" Oct 11 05:57:11 crc kubenswrapper[4651]: I1011 05:57:11.024489 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b3ba716-9aa8-4cd4-baf5-d6122b10c057" containerName="registry-server" Oct 11 05:57:11 crc kubenswrapper[4651]: I1011 05:57:11.024787 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="51e2994c-3647-4536-b336-f0fc8d5e77eb" containerName="container-00" Oct 11 05:57:11 crc kubenswrapper[4651]: I1011 05:57:11.024849 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b3ba716-9aa8-4cd4-baf5-d6122b10c057" containerName="registry-server" Oct 11 05:57:11 crc kubenswrapper[4651]: I1011 05:57:11.025777 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gr7gt/crc-debug-6897c" Oct 11 05:57:11 crc kubenswrapper[4651]: I1011 05:57:11.150964 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4dp6q\" (UniqueName: \"kubernetes.io/projected/9783e11a-b693-4751-a898-ce25464388c6-kube-api-access-4dp6q\") pod \"crc-debug-6897c\" (UID: \"9783e11a-b693-4751-a898-ce25464388c6\") " pod="openshift-must-gather-gr7gt/crc-debug-6897c" Oct 11 05:57:11 crc kubenswrapper[4651]: I1011 05:57:11.151120 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9783e11a-b693-4751-a898-ce25464388c6-host\") pod \"crc-debug-6897c\" (UID: \"9783e11a-b693-4751-a898-ce25464388c6\") " pod="openshift-must-gather-gr7gt/crc-debug-6897c" Oct 11 05:57:11 crc kubenswrapper[4651]: I1011 05:57:11.252366 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4dp6q\" (UniqueName: \"kubernetes.io/projected/9783e11a-b693-4751-a898-ce25464388c6-kube-api-access-4dp6q\") pod \"crc-debug-6897c\" (UID: \"9783e11a-b693-4751-a898-ce25464388c6\") " pod="openshift-must-gather-gr7gt/crc-debug-6897c" Oct 11 05:57:11 crc kubenswrapper[4651]: I1011 05:57:11.252451 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9783e11a-b693-4751-a898-ce25464388c6-host\") pod \"crc-debug-6897c\" (UID: \"9783e11a-b693-4751-a898-ce25464388c6\") " pod="openshift-must-gather-gr7gt/crc-debug-6897c" Oct 11 05:57:11 crc kubenswrapper[4651]: I1011 05:57:11.252576 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9783e11a-b693-4751-a898-ce25464388c6-host\") pod \"crc-debug-6897c\" (UID: \"9783e11a-b693-4751-a898-ce25464388c6\") " pod="openshift-must-gather-gr7gt/crc-debug-6897c" Oct 11 05:57:11 crc kubenswrapper[4651]: I1011 05:57:11.281324 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4dp6q\" (UniqueName: \"kubernetes.io/projected/9783e11a-b693-4751-a898-ce25464388c6-kube-api-access-4dp6q\") pod \"crc-debug-6897c\" (UID: \"9783e11a-b693-4751-a898-ce25464388c6\") " pod="openshift-must-gather-gr7gt/crc-debug-6897c" Oct 11 05:57:11 crc kubenswrapper[4651]: I1011 05:57:11.342594 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gr7gt/crc-debug-6897c" Oct 11 05:57:11 crc kubenswrapper[4651]: W1011 05:57:11.396485 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9783e11a_b693_4751_a898_ce25464388c6.slice/crio-8ec81c6c0c0c85b5788e6f1a262f7d1f47846751e9e03f86bb78e237d0731c28 WatchSource:0}: Error finding container 8ec81c6c0c0c85b5788e6f1a262f7d1f47846751e9e03f86bb78e237d0731c28: Status 404 returned error can't find the container with id 8ec81c6c0c0c85b5788e6f1a262f7d1f47846751e9e03f86bb78e237d0731c28 Oct 11 05:57:11 crc kubenswrapper[4651]: I1011 05:57:11.700445 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gr7gt/crc-debug-6897c" event={"ID":"9783e11a-b693-4751-a898-ce25464388c6","Type":"ContainerStarted","Data":"c14c5bde63f8c30df4bcfab229a9c60dd355db5633f674b5e9480e14858b8942"} Oct 11 05:57:11 crc kubenswrapper[4651]: I1011 05:57:11.700883 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gr7gt/crc-debug-6897c" event={"ID":"9783e11a-b693-4751-a898-ce25464388c6","Type":"ContainerStarted","Data":"8ec81c6c0c0c85b5788e6f1a262f7d1f47846751e9e03f86bb78e237d0731c28"} Oct 11 05:57:11 crc kubenswrapper[4651]: I1011 05:57:11.725003 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-gr7gt/crc-debug-6897c" podStartSLOduration=0.724980861 podStartE2EDuration="724.980861ms" podCreationTimestamp="2025-10-11 05:57:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 05:57:11.718314179 +0000 UTC m=+3952.614546985" watchObservedRunningTime="2025-10-11 05:57:11.724980861 +0000 UTC m=+3952.621213668" Oct 11 05:57:11 crc kubenswrapper[4651]: I1011 05:57:11.884746 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51e2994c-3647-4536-b336-f0fc8d5e77eb" path="/var/lib/kubelet/pods/51e2994c-3647-4536-b336-f0fc8d5e77eb/volumes" Oct 11 05:57:12 crc kubenswrapper[4651]: I1011 05:57:12.710076 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gr7gt/crc-debug-6897c" event={"ID":"9783e11a-b693-4751-a898-ce25464388c6","Type":"ContainerDied","Data":"c14c5bde63f8c30df4bcfab229a9c60dd355db5633f674b5e9480e14858b8942"} Oct 11 05:57:12 crc kubenswrapper[4651]: I1011 05:57:12.709981 4651 generic.go:334] "Generic (PLEG): container finished" podID="9783e11a-b693-4751-a898-ce25464388c6" containerID="c14c5bde63f8c30df4bcfab229a9c60dd355db5633f674b5e9480e14858b8942" exitCode=0 Oct 11 05:57:13 crc kubenswrapper[4651]: I1011 05:57:13.841001 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gr7gt/crc-debug-6897c" Oct 11 05:57:13 crc kubenswrapper[4651]: I1011 05:57:13.889488 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-gr7gt/crc-debug-6897c"] Oct 11 05:57:13 crc kubenswrapper[4651]: I1011 05:57:13.903194 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-gr7gt/crc-debug-6897c"] Oct 11 05:57:14 crc kubenswrapper[4651]: I1011 05:57:14.006039 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4dp6q\" (UniqueName: \"kubernetes.io/projected/9783e11a-b693-4751-a898-ce25464388c6-kube-api-access-4dp6q\") pod \"9783e11a-b693-4751-a898-ce25464388c6\" (UID: \"9783e11a-b693-4751-a898-ce25464388c6\") " Oct 11 05:57:14 crc kubenswrapper[4651]: I1011 05:57:14.006145 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9783e11a-b693-4751-a898-ce25464388c6-host\") pod \"9783e11a-b693-4751-a898-ce25464388c6\" (UID: \"9783e11a-b693-4751-a898-ce25464388c6\") " Oct 11 05:57:14 crc kubenswrapper[4651]: I1011 05:57:14.006307 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9783e11a-b693-4751-a898-ce25464388c6-host" (OuterVolumeSpecName: "host") pod "9783e11a-b693-4751-a898-ce25464388c6" (UID: "9783e11a-b693-4751-a898-ce25464388c6"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 05:57:14 crc kubenswrapper[4651]: I1011 05:57:14.006914 4651 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9783e11a-b693-4751-a898-ce25464388c6-host\") on node \"crc\" DevicePath \"\"" Oct 11 05:57:14 crc kubenswrapper[4651]: I1011 05:57:14.016629 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9783e11a-b693-4751-a898-ce25464388c6-kube-api-access-4dp6q" (OuterVolumeSpecName: "kube-api-access-4dp6q") pod "9783e11a-b693-4751-a898-ce25464388c6" (UID: "9783e11a-b693-4751-a898-ce25464388c6"). InnerVolumeSpecName "kube-api-access-4dp6q". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:57:14 crc kubenswrapper[4651]: I1011 05:57:14.109907 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4dp6q\" (UniqueName: \"kubernetes.io/projected/9783e11a-b693-4751-a898-ce25464388c6-kube-api-access-4dp6q\") on node \"crc\" DevicePath \"\"" Oct 11 05:57:14 crc kubenswrapper[4651]: I1011 05:57:14.733872 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8ec81c6c0c0c85b5788e6f1a262f7d1f47846751e9e03f86bb78e237d0731c28" Oct 11 05:57:14 crc kubenswrapper[4651]: I1011 05:57:14.734374 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gr7gt/crc-debug-6897c" Oct 11 05:57:15 crc kubenswrapper[4651]: I1011 05:57:15.074313 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-gr7gt/crc-debug-cf5qh"] Oct 11 05:57:15 crc kubenswrapper[4651]: E1011 05:57:15.074713 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9783e11a-b693-4751-a898-ce25464388c6" containerName="container-00" Oct 11 05:57:15 crc kubenswrapper[4651]: I1011 05:57:15.074725 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="9783e11a-b693-4751-a898-ce25464388c6" containerName="container-00" Oct 11 05:57:15 crc kubenswrapper[4651]: I1011 05:57:15.074918 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="9783e11a-b693-4751-a898-ce25464388c6" containerName="container-00" Oct 11 05:57:15 crc kubenswrapper[4651]: I1011 05:57:15.075502 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-gr7gt/crc-debug-cf5qh" Oct 11 05:57:15 crc kubenswrapper[4651]: I1011 05:57:15.231507 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zsrfl\" (UniqueName: \"kubernetes.io/projected/f74e2ba9-5b70-4eb3-978f-2575f7894973-kube-api-access-zsrfl\") pod \"crc-debug-cf5qh\" (UID: \"f74e2ba9-5b70-4eb3-978f-2575f7894973\") " pod="openshift-must-gather-gr7gt/crc-debug-cf5qh" Oct 11 05:57:15 crc kubenswrapper[4651]: I1011 05:57:15.231902 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f74e2ba9-5b70-4eb3-978f-2575f7894973-host\") pod \"crc-debug-cf5qh\" (UID: \"f74e2ba9-5b70-4eb3-978f-2575f7894973\") " pod="openshift-must-gather-gr7gt/crc-debug-cf5qh" Oct 11 05:57:15 crc kubenswrapper[4651]: I1011 05:57:15.334162 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zsrfl\" (UniqueName: \"kubernetes.io/projected/f74e2ba9-5b70-4eb3-978f-2575f7894973-kube-api-access-zsrfl\") pod \"crc-debug-cf5qh\" (UID: \"f74e2ba9-5b70-4eb3-978f-2575f7894973\") " pod="openshift-must-gather-gr7gt/crc-debug-cf5qh" Oct 11 05:57:15 crc kubenswrapper[4651]: I1011 05:57:15.334290 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f74e2ba9-5b70-4eb3-978f-2575f7894973-host\") pod \"crc-debug-cf5qh\" (UID: \"f74e2ba9-5b70-4eb3-978f-2575f7894973\") " pod="openshift-must-gather-gr7gt/crc-debug-cf5qh" Oct 11 05:57:15 crc kubenswrapper[4651]: I1011 05:57:15.334402 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f74e2ba9-5b70-4eb3-978f-2575f7894973-host\") pod \"crc-debug-cf5qh\" (UID: \"f74e2ba9-5b70-4eb3-978f-2575f7894973\") " pod="openshift-must-gather-gr7gt/crc-debug-cf5qh" Oct 11 05:57:15 crc kubenswrapper[4651]: I1011 05:57:15.355726 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zsrfl\" (UniqueName: \"kubernetes.io/projected/f74e2ba9-5b70-4eb3-978f-2575f7894973-kube-api-access-zsrfl\") pod \"crc-debug-cf5qh\" (UID: \"f74e2ba9-5b70-4eb3-978f-2575f7894973\") " pod="openshift-must-gather-gr7gt/crc-debug-cf5qh" Oct 11 05:57:15 crc kubenswrapper[4651]: I1011 05:57:15.396357 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gr7gt/crc-debug-cf5qh" Oct 11 05:57:15 crc kubenswrapper[4651]: W1011 05:57:15.430413 4651 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf74e2ba9_5b70_4eb3_978f_2575f7894973.slice/crio-310e462fe2ea6c72e248f0a85e81f2fb25a23055311b0307836170418c6929a5 WatchSource:0}: Error finding container 310e462fe2ea6c72e248f0a85e81f2fb25a23055311b0307836170418c6929a5: Status 404 returned error can't find the container with id 310e462fe2ea6c72e248f0a85e81f2fb25a23055311b0307836170418c6929a5 Oct 11 05:57:15 crc kubenswrapper[4651]: I1011 05:57:15.743801 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gr7gt/crc-debug-cf5qh" event={"ID":"f74e2ba9-5b70-4eb3-978f-2575f7894973","Type":"ContainerStarted","Data":"eca226c3f64f4d4aab00bf434dc370b1abfa2518054932b750c801a671d2d0fb"} Oct 11 05:57:15 crc kubenswrapper[4651]: I1011 05:57:15.744298 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gr7gt/crc-debug-cf5qh" event={"ID":"f74e2ba9-5b70-4eb3-978f-2575f7894973","Type":"ContainerStarted","Data":"310e462fe2ea6c72e248f0a85e81f2fb25a23055311b0307836170418c6929a5"} Oct 11 05:57:15 crc kubenswrapper[4651]: I1011 05:57:15.784403 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-gr7gt/crc-debug-cf5qh"] Oct 11 05:57:15 crc kubenswrapper[4651]: I1011 05:57:15.795675 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-gr7gt/crc-debug-cf5qh"] Oct 11 05:57:15 crc kubenswrapper[4651]: I1011 05:57:15.889177 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9783e11a-b693-4751-a898-ce25464388c6" path="/var/lib/kubelet/pods/9783e11a-b693-4751-a898-ce25464388c6/volumes" Oct 11 05:57:16 crc kubenswrapper[4651]: I1011 05:57:16.756273 4651 generic.go:334] "Generic (PLEG): container finished" podID="f74e2ba9-5b70-4eb3-978f-2575f7894973" containerID="eca226c3f64f4d4aab00bf434dc370b1abfa2518054932b750c801a671d2d0fb" exitCode=0 Oct 11 05:57:16 crc kubenswrapper[4651]: I1011 05:57:16.907137 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-gr7gt/crc-debug-cf5qh" Oct 11 05:57:17 crc kubenswrapper[4651]: I1011 05:57:17.066761 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f74e2ba9-5b70-4eb3-978f-2575f7894973-host\") pod \"f74e2ba9-5b70-4eb3-978f-2575f7894973\" (UID: \"f74e2ba9-5b70-4eb3-978f-2575f7894973\") " Oct 11 05:57:17 crc kubenswrapper[4651]: I1011 05:57:17.066969 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f74e2ba9-5b70-4eb3-978f-2575f7894973-host" (OuterVolumeSpecName: "host") pod "f74e2ba9-5b70-4eb3-978f-2575f7894973" (UID: "f74e2ba9-5b70-4eb3-978f-2575f7894973"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 05:57:17 crc kubenswrapper[4651]: I1011 05:57:17.067021 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zsrfl\" (UniqueName: \"kubernetes.io/projected/f74e2ba9-5b70-4eb3-978f-2575f7894973-kube-api-access-zsrfl\") pod \"f74e2ba9-5b70-4eb3-978f-2575f7894973\" (UID: \"f74e2ba9-5b70-4eb3-978f-2575f7894973\") " Oct 11 05:57:17 crc kubenswrapper[4651]: I1011 05:57:17.068044 4651 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f74e2ba9-5b70-4eb3-978f-2575f7894973-host\") on node \"crc\" DevicePath \"\"" Oct 11 05:57:17 crc kubenswrapper[4651]: I1011 05:57:17.074583 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f74e2ba9-5b70-4eb3-978f-2575f7894973-kube-api-access-zsrfl" (OuterVolumeSpecName: "kube-api-access-zsrfl") pod "f74e2ba9-5b70-4eb3-978f-2575f7894973" (UID: "f74e2ba9-5b70-4eb3-978f-2575f7894973"). InnerVolumeSpecName "kube-api-access-zsrfl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 05:57:17 crc kubenswrapper[4651]: I1011 05:57:17.170171 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zsrfl\" (UniqueName: \"kubernetes.io/projected/f74e2ba9-5b70-4eb3-978f-2575f7894973-kube-api-access-zsrfl\") on node \"crc\" DevicePath \"\"" Oct 11 05:57:17 crc kubenswrapper[4651]: I1011 05:57:17.767060 4651 scope.go:117] "RemoveContainer" containerID="eca226c3f64f4d4aab00bf434dc370b1abfa2518054932b750c801a671d2d0fb" Oct 11 05:57:17 crc kubenswrapper[4651]: I1011 05:57:17.767161 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-gr7gt/crc-debug-cf5qh" Oct 11 05:57:17 crc kubenswrapper[4651]: I1011 05:57:17.884355 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f74e2ba9-5b70-4eb3-978f-2575f7894973" path="/var/lib/kubelet/pods/f74e2ba9-5b70-4eb3-978f-2575f7894973/volumes" Oct 11 05:57:34 crc kubenswrapper[4651]: I1011 05:57:34.690630 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-694f8cb944-jlqsz_5c85be3f-c6fd-4d66-95ba-87b1502b5548/barbican-api/0.log" Oct 11 05:57:34 crc kubenswrapper[4651]: I1011 05:57:34.796629 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-694f8cb944-jlqsz_5c85be3f-c6fd-4d66-95ba-87b1502b5548/barbican-api-log/0.log" Oct 11 05:57:34 crc kubenswrapper[4651]: I1011 05:57:34.915761 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-65475785f8-ljsqt_3d5d4c12-9a64-4b51-9613-7d8905d3367f/barbican-keystone-listener/0.log" Oct 11 05:57:34 crc kubenswrapper[4651]: I1011 05:57:34.936859 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-65475785f8-ljsqt_3d5d4c12-9a64-4b51-9613-7d8905d3367f/barbican-keystone-listener-log/0.log" Oct 11 05:57:35 crc kubenswrapper[4651]: I1011 05:57:35.086096 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6b84468647-bq8d6_0a7fc0ac-3c48-4cd1-9cbd-78eca125768d/barbican-worker/0.log" Oct 11 05:57:35 crc kubenswrapper[4651]: I1011 05:57:35.127199 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6b84468647-bq8d6_0a7fc0ac-3c48-4cd1-9cbd-78eca125768d/barbican-worker-log/0.log" Oct 11 05:57:35 crc kubenswrapper[4651]: I1011 05:57:35.314937 4651 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-h2pxs_073ca1d1-d406-4d47-bfdd-1d1ccc6a0444/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 11 05:57:35 crc kubenswrapper[4651]: I1011 05:57:35.351031 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_123a74ae-c8a8-467a-b358-b13ac2cff461/ceilometer-central-agent/0.log"
Oct 11 05:57:35 crc kubenswrapper[4651]: I1011 05:57:35.412150 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_123a74ae-c8a8-467a-b358-b13ac2cff461/ceilometer-notification-agent/0.log"
Oct 11 05:57:35 crc kubenswrapper[4651]: I1011 05:57:35.500754 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_123a74ae-c8a8-467a-b358-b13ac2cff461/sg-core/0.log"
Oct 11 05:57:35 crc kubenswrapper[4651]: I1011 05:57:35.547903 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_123a74ae-c8a8-467a-b358-b13ac2cff461/proxy-httpd/0.log"
Oct 11 05:57:35 crc kubenswrapper[4651]: I1011 05:57:35.670421 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1/cinder-api/0.log"
Oct 11 05:57:35 crc kubenswrapper[4651]: I1011 05:57:35.691328 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_6b53ec0e-8ba3-47a6-9715-84fe1ca28ff1/cinder-api-log/0.log"
Oct 11 05:57:35 crc kubenswrapper[4651]: I1011 05:57:35.846969 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_9bee9214-1b63-4ef6-81ca-507ef630559b/cinder-scheduler/0.log"
Oct 11 05:57:35 crc kubenswrapper[4651]: I1011 05:57:35.866963 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_9bee9214-1b63-4ef6-81ca-507ef630559b/probe/0.log"
Oct 11 05:57:35 crc kubenswrapper[4651]: I1011 05:57:35.994691 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-jq2ff_6634acd3-8550-4286-ad94-004cfe4c7def/configure-network-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 11 05:57:36 crc kubenswrapper[4651]: I1011 05:57:36.057023 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-5wgh8_bd4b257e-2d94-4f78-9ff5-cef288fd0858/configure-os-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 11 05:57:36 crc kubenswrapper[4651]: I1011 05:57:36.183202 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-zjfbr_18c28764-b000-46b3-af99-9410c165ff04/configure-os-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 11 05:57:36 crc kubenswrapper[4651]: I1011 05:57:36.310790 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-8c6f6df99-jzqc8_6db2b942-f99f-417d-aff6-a37800db6a41/init/0.log"
Oct 11 05:57:36 crc kubenswrapper[4651]: I1011 05:57:36.450070 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-8c6f6df99-jzqc8_6db2b942-f99f-417d-aff6-a37800db6a41/dnsmasq-dns/0.log"
Oct 11 05:57:36 crc kubenswrapper[4651]: I1011 05:57:36.465476 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-8c6f6df99-jzqc8_6db2b942-f99f-417d-aff6-a37800db6a41/init/0.log"
Oct 11 05:57:36 crc kubenswrapper[4651]: I1011 05:57:36.518796 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-xqtc2_0428933e-bd0d-4be4-94a6-25caf11d1f23/download-cache-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 11 05:57:36 crc kubenswrapper[4651]: I1011 05:57:36.667472 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f/glance-httpd/0.log"
Oct 11 05:57:36 crc kubenswrapper[4651]: I1011 05:57:36.741313 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_ee7479a2-fd16-4ec2-a0e0-28fbaffbce9f/glance-log/0.log"
Oct 11 05:57:36 crc kubenswrapper[4651]: I1011 05:57:36.864192 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_3a991cdf-ad8d-4392-bb4e-792e607d740c/glance-httpd/0.log"
Oct 11 05:57:36 crc kubenswrapper[4651]: I1011 05:57:36.911603 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_3a991cdf-ad8d-4392-bb4e-792e607d740c/glance-log/0.log"
Oct 11 05:57:37 crc kubenswrapper[4651]: I1011 05:57:37.061446 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-5f7d84485b-zb5s7_a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7/horizon/0.log"
Oct 11 05:57:37 crc kubenswrapper[4651]: I1011 05:57:37.184466 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-xvd4r_751c31c3-37b5-4b70-89ba-3c15aee1b7c3/install-certs-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 11 05:57:37 crc kubenswrapper[4651]: I1011 05:57:37.419182 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-bbbdw_b6609904-4dac-496c-b95b-583873422810/install-os-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 11 05:57:37 crc kubenswrapper[4651]: I1011 05:57:37.427521 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-5f7d84485b-zb5s7_a75ab800-fe1f-453d-b9f9-1cc1f39b6ca7/horizon-log/0.log"
Oct 11 05:57:37 crc kubenswrapper[4651]: I1011 05:57:37.605352 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-5dbc5c6b84-lhwcw_89ac3499-9018-4545-9e5f-f6eda0d14302/keystone-api/0.log"
Oct 11 05:57:37 crc kubenswrapper[4651]: I1011 05:57:37.665362 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_4fbfad70-f21a-4362-9b53-c955b9cca958/kube-state-metrics/0.log"
Oct 11 05:57:37 crc kubenswrapper[4651]: I1011 05:57:37.740223 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-jjxtk_6d343a98-7fde-4f8c-995f-39a826aa5f12/libvirt-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 11 05:57:38 crc kubenswrapper[4651]: I1011 05:57:38.023164 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6856c774b5-fq9r6_301b63d9-53a7-49b2-9d71-2b2bf854de89/neutron-httpd/0.log"
Oct 11 05:57:38 crc kubenswrapper[4651]: I1011 05:57:38.050635 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6856c774b5-fq9r6_301b63d9-53a7-49b2-9d71-2b2bf854de89/neutron-api/0.log"
Oct 11 05:57:38 crc kubenswrapper[4651]: I1011 05:57:38.103147 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-whlk4_6e7f2e9b-b154-4d49-beea-654732761981/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 11 05:57:38 crc kubenswrapper[4651]: I1011 05:57:38.656402 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76/nova-api-log/0.log"
Oct 11 05:57:38 crc kubenswrapper[4651]: I1011 05:57:38.768161 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_7a57592b-84ea-4d7c-ae5d-fcb7c009cbb0/nova-cell0-conductor-conductor/0.log"
Oct 11 05:57:38 crc kubenswrapper[4651]: I1011 05:57:38.958285 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_4791f0e9-7fb3-4b2e-8e67-52e1eb0fde76/nova-api-api/0.log"
Oct 11 05:57:39 crc kubenswrapper[4651]: I1011 05:57:39.174109 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_d2b4f087-b74d-4683-815d-35b6f7736f04/nova-cell1-conductor-conductor/0.log"
Oct 11 05:57:39 crc kubenswrapper[4651]: I1011 05:57:39.183172 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_d986aa9a-c031-4a99-b2d8-6c09be2fc264/nova-cell1-novncproxy-novncproxy/0.log"
Oct 11 05:57:39 crc kubenswrapper[4651]: I1011 05:57:39.270986 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-t4rtg_c2b4841c-2ea3-464b-8147-a24437d0d079/nova-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 11 05:57:39 crc kubenswrapper[4651]: I1011 05:57:39.530047 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_7d5fc748-aa4e-45c9-a268-9fdc8b2ae358/nova-metadata-log/0.log"
Oct 11 05:57:39 crc kubenswrapper[4651]: I1011 05:57:39.730447 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_c7fc16c5-4cac-4da2-82d1-226d056fe645/mysql-bootstrap/0.log"
Oct 11 05:57:39 crc kubenswrapper[4651]: I1011 05:57:39.834127 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_14edb1d7-f645-4c9e-8363-0342719b2457/nova-scheduler-scheduler/0.log"
Oct 11 05:57:39 crc kubenswrapper[4651]: I1011 05:57:39.958216 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_c7fc16c5-4cac-4da2-82d1-226d056fe645/mysql-bootstrap/0.log"
Oct 11 05:57:40 crc kubenswrapper[4651]: I1011 05:57:40.033436 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_c7fc16c5-4cac-4da2-82d1-226d056fe645/galera/0.log"
Oct 11 05:57:40 crc kubenswrapper[4651]: I1011 05:57:40.184061 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_6e4d1f39-f0c4-4a19-a525-d0119d4b77e5/mysql-bootstrap/0.log"
Oct 11 05:57:40 crc kubenswrapper[4651]: I1011 05:57:40.340530 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_6e4d1f39-f0c4-4a19-a525-d0119d4b77e5/galera/0.log"
Oct 11 05:57:40 crc kubenswrapper[4651]: I1011 05:57:40.370365 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_6e4d1f39-f0c4-4a19-a525-d0119d4b77e5/mysql-bootstrap/0.log"
Oct 11 05:57:40 crc kubenswrapper[4651]: I1011 05:57:40.566652 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_47aacf69-f6eb-4e85-9b70-8f241bfa812f/openstackclient/0.log"
Oct 11 05:57:40 crc kubenswrapper[4651]: I1011 05:57:40.670928 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-4gdx9_058302c4-d304-4a99-afbc-84a558968cfe/ovn-controller/0.log"
Oct 11 05:57:40 crc kubenswrapper[4651]: I1011 05:57:40.801216 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_7d5fc748-aa4e-45c9-a268-9fdc8b2ae358/nova-metadata-metadata/0.log"
Oct 11 05:57:40 crc kubenswrapper[4651]: I1011 05:57:40.803392 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-6c9fd_6e416ba6-0c00-41f4-857d-3c53c9179e6b/openstack-network-exporter/0.log"
Oct 11 05:57:40 crc kubenswrapper[4651]: I1011 05:57:40.966854 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-2rjqb_15e07425-1bc8-43c4-ab2a-9daf9e9f95bb/ovsdb-server-init/0.log"
Oct 11 05:57:41 crc kubenswrapper[4651]: I1011 05:57:41.218539 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-2rjqb_15e07425-1bc8-43c4-ab2a-9daf9e9f95bb/ovsdb-server-init/0.log"
Oct 11 05:57:41 crc kubenswrapper[4651]: I1011 05:57:41.229115 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-2rjqb_15e07425-1bc8-43c4-ab2a-9daf9e9f95bb/ovsdb-server/0.log"
Oct 11 05:57:41 crc kubenswrapper[4651]: I1011 05:57:41.262755 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-2rjqb_15e07425-1bc8-43c4-ab2a-9daf9e9f95bb/ovs-vswitchd/0.log"
Oct 11 05:57:41 crc kubenswrapper[4651]: I1011 05:57:41.471868 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-cfgdk_b0507706-1820-417c-824e-e8420fda7baa/ovn-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 11 05:57:41 crc kubenswrapper[4651]: I1011 05:57:41.520049 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_a7f3b01c-ab92-45f7-9e89-a76a93a8db6a/ovn-northd/0.log"
Oct 11 05:57:41 crc kubenswrapper[4651]: I1011 05:57:41.548015 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_a7f3b01c-ab92-45f7-9e89-a76a93a8db6a/openstack-network-exporter/0.log"
Oct 11 05:57:41 crc kubenswrapper[4651]: I1011 05:57:41.756731 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_66e1ea71-4579-48c7-b0c9-8074d1a6f821/ovsdbserver-nb/0.log"
Oct 11 05:57:41 crc kubenswrapper[4651]: I1011 05:57:41.767431 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_66e1ea71-4579-48c7-b0c9-8074d1a6f821/openstack-network-exporter/0.log"
Oct 11 05:57:41 crc kubenswrapper[4651]: I1011 05:57:41.933898 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_0ddbe514-235a-4191-9f6b-3d785b0b4d21/ovsdbserver-sb/0.log"
Oct 11 05:57:41 crc kubenswrapper[4651]: I1011 05:57:41.948630 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_0ddbe514-235a-4191-9f6b-3d785b0b4d21/openstack-network-exporter/0.log"
Oct 11 05:57:42 crc kubenswrapper[4651]: I1011 05:57:42.040487 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-74cbfd888-nqwlq_c777922e-553b-44ec-84c1-4b3f6644701b/placement-api/0.log"
Oct 11 05:57:42 crc kubenswrapper[4651]: I1011 05:57:42.215070 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-74cbfd888-nqwlq_c777922e-553b-44ec-84c1-4b3f6644701b/placement-log/0.log"
Oct 11 05:57:42 crc kubenswrapper[4651]: I1011 05:57:42.263780 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_67140304-66cc-425b-a21c-b09bb0c83b8a/setup-container/0.log"
Oct 11 05:57:42 crc kubenswrapper[4651]: I1011 05:57:42.397018 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_67140304-66cc-425b-a21c-b09bb0c83b8a/rabbitmq/0.log"
Oct 11 05:57:42 crc kubenswrapper[4651]: I1011 05:57:42.419427 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_67140304-66cc-425b-a21c-b09bb0c83b8a/setup-container/0.log"
Oct 11 05:57:42 crc kubenswrapper[4651]: I1011 05:57:42.514073 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_f157ed46-75e8-4f03-b4ec-1234385015bd/setup-container/0.log"
Oct 11 05:57:42 crc kubenswrapper[4651]: I1011 05:57:42.730171 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_f157ed46-75e8-4f03-b4ec-1234385015bd/setup-container/0.log"
Oct 11 05:57:42 crc kubenswrapper[4651]: I1011 05:57:42.736179 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_f157ed46-75e8-4f03-b4ec-1234385015bd/rabbitmq/0.log"
Oct 11 05:57:42 crc kubenswrapper[4651]: I1011 05:57:42.742031 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-dsw67_c5dbb723-ec78-4c80-a7fe-10d7499493c7/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 11 05:57:42 crc kubenswrapper[4651]: I1011 05:57:42.927003 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-kwqbl_f8169fed-dda6-4c75-8a3c-4ecd3b7e1866/redhat-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 11 05:57:43 crc kubenswrapper[4651]: I1011 05:57:43.041207 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-jl25m_fdd938c5-0eb6-402b-9ee3-28bd04fbd55e/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 11 05:57:43 crc kubenswrapper[4651]: I1011 05:57:43.165129 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-6g9mw_cbfb3a24-45a0-4455-984c-134812231d47/run-os-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 11 05:57:43 crc kubenswrapper[4651]: I1011 05:57:43.290765 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-7vjzq_0dfc1301-868d-4226-917c-475041f220f5/ssh-known-hosts-edpm-deployment/0.log"
Oct 11 05:57:43 crc kubenswrapper[4651]: I1011 05:57:43.558122 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-695465f8bc-lbxbx_e8259a36-62ec-4cdc-b377-3574bf0bead5/proxy-server/0.log"
Oct 11 05:57:43 crc kubenswrapper[4651]: I1011 05:57:43.593519 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-695465f8bc-lbxbx_e8259a36-62ec-4cdc-b377-3574bf0bead5/proxy-httpd/0.log"
Oct 11 05:57:43 crc kubenswrapper[4651]: I1011 05:57:43.627153 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-l2gmr_35bb96b9-93d2-4ab5-a102-11f093a29144/swift-ring-rebalance/0.log"
Oct 11 05:57:43 crc kubenswrapper[4651]: I1011 05:57:43.785580 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_aef9d930-4287-490f-85c7-5a791f985a77/account-auditor/0.log"
Oct 11 05:57:43 crc kubenswrapper[4651]: I1011 05:57:43.786736 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_aef9d930-4287-490f-85c7-5a791f985a77/account-reaper/0.log"
Oct 11 05:57:43 crc kubenswrapper[4651]: I1011 05:57:43.875446 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_aef9d930-4287-490f-85c7-5a791f985a77/account-replicator/0.log"
Oct 11 05:57:43 crc kubenswrapper[4651]: I1011 05:57:43.996583 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_aef9d930-4287-490f-85c7-5a791f985a77/account-server/0.log"
Oct 11 05:57:44 crc kubenswrapper[4651]: I1011 05:57:44.069319 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_aef9d930-4287-490f-85c7-5a791f985a77/container-auditor/0.log"
Oct 11 05:57:44 crc kubenswrapper[4651]: I1011 05:57:44.110558 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_aef9d930-4287-490f-85c7-5a791f985a77/container-server/0.log"
Oct 11 05:57:44 crc kubenswrapper[4651]: I1011 05:57:44.138957 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_aef9d930-4287-490f-85c7-5a791f985a77/container-replicator/0.log"
Oct 11 05:57:44 crc kubenswrapper[4651]: I1011 05:57:44.225725 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_aef9d930-4287-490f-85c7-5a791f985a77/container-updater/0.log"
Oct 11 05:57:44 crc kubenswrapper[4651]: I1011 05:57:44.285768 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_aef9d930-4287-490f-85c7-5a791f985a77/object-expirer/0.log"
Oct 11 05:57:44 crc kubenswrapper[4651]: I1011 05:57:44.310575 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_aef9d930-4287-490f-85c7-5a791f985a77/object-auditor/0.log"
Oct 11 05:57:44 crc kubenswrapper[4651]: I1011 05:57:44.375340 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_aef9d930-4287-490f-85c7-5a791f985a77/object-replicator/0.log"
Oct 11 05:57:44 crc kubenswrapper[4651]: I1011 05:57:44.418035 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_aef9d930-4287-490f-85c7-5a791f985a77/object-server/0.log"
Oct 11 05:57:44 crc kubenswrapper[4651]: I1011 05:57:44.541611 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_aef9d930-4287-490f-85c7-5a791f985a77/object-updater/0.log"
Oct 11 05:57:44 crc kubenswrapper[4651]: I1011 05:57:44.579245 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_aef9d930-4287-490f-85c7-5a791f985a77/swift-recon-cron/0.log"
Oct 11 05:57:44 crc kubenswrapper[4651]: I1011 05:57:44.609103 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_aef9d930-4287-490f-85c7-5a791f985a77/rsync/0.log"
Oct 11 05:57:44 crc kubenswrapper[4651]: I1011 05:57:44.783125 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-lzfhh_352a8263-3fc8-49fc-bc0b-6b5671d02fde/telemetry-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 11 05:57:44 crc kubenswrapper[4651]: I1011 05:57:44.814252 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_76b8d472-5f4e-4d97-be15-0f5be51acd85/tempest-tests-tempest-tests-runner/0.log"
Oct 11 05:57:44 crc kubenswrapper[4651]: I1011 05:57:44.998448 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_87851c60-a63d-43bd-a248-6dad36680eed/test-operator-logs-container/0.log"
Oct 11 05:57:45 crc kubenswrapper[4651]: I1011 05:57:45.072288 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-x9lqn_1187c352-70c0-4b8f-a7fa-300e4093c60c/validate-network-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 11 05:57:51 crc kubenswrapper[4651]: I1011 05:57:51.787506 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cszxx"]
Oct 11 05:57:51 crc kubenswrapper[4651]: E1011 05:57:51.793985 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f74e2ba9-5b70-4eb3-978f-2575f7894973" containerName="container-00"
Oct 11 05:57:51 crc kubenswrapper[4651]: I1011 05:57:51.794064 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="f74e2ba9-5b70-4eb3-978f-2575f7894973" containerName="container-00"
Oct 11 05:57:51 crc kubenswrapper[4651]: I1011 05:57:51.794328 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="f74e2ba9-5b70-4eb3-978f-2575f7894973" containerName="container-00"
Oct 11 05:57:51 crc kubenswrapper[4651]: I1011 05:57:51.795742 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cszxx"
Oct 11 05:57:51 crc kubenswrapper[4651]: I1011 05:57:51.802545 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cszxx"]
Oct 11 05:57:51 crc kubenswrapper[4651]: I1011 05:57:51.862291 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f5bda72-d019-4696-8ec0-992580274672-utilities\") pod \"redhat-operators-cszxx\" (UID: \"3f5bda72-d019-4696-8ec0-992580274672\") " pod="openshift-marketplace/redhat-operators-cszxx"
Oct 11 05:57:51 crc kubenswrapper[4651]: I1011 05:57:51.862689 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f5bda72-d019-4696-8ec0-992580274672-catalog-content\") pod \"redhat-operators-cszxx\" (UID: \"3f5bda72-d019-4696-8ec0-992580274672\") " pod="openshift-marketplace/redhat-operators-cszxx"
Oct 11 05:57:51 crc kubenswrapper[4651]: I1011 05:57:51.862855 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6gzt\" (UniqueName: \"kubernetes.io/projected/3f5bda72-d019-4696-8ec0-992580274672-kube-api-access-t6gzt\") pod \"redhat-operators-cszxx\" (UID: \"3f5bda72-d019-4696-8ec0-992580274672\") " pod="openshift-marketplace/redhat-operators-cszxx"
Oct 11 05:57:51 crc kubenswrapper[4651]: I1011 05:57:51.964789 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6gzt\" (UniqueName: \"kubernetes.io/projected/3f5bda72-d019-4696-8ec0-992580274672-kube-api-access-t6gzt\") pod \"redhat-operators-cszxx\" (UID: \"3f5bda72-d019-4696-8ec0-992580274672\") " pod="openshift-marketplace/redhat-operators-cszxx"
Oct 11 05:57:51 crc kubenswrapper[4651]: I1011 05:57:51.964919 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f5bda72-d019-4696-8ec0-992580274672-utilities\") pod \"redhat-operators-cszxx\" (UID: \"3f5bda72-d019-4696-8ec0-992580274672\") " pod="openshift-marketplace/redhat-operators-cszxx"
Oct 11 05:57:51 crc kubenswrapper[4651]: I1011 05:57:51.964947 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f5bda72-d019-4696-8ec0-992580274672-catalog-content\") pod \"redhat-operators-cszxx\" (UID: \"3f5bda72-d019-4696-8ec0-992580274672\") " pod="openshift-marketplace/redhat-operators-cszxx"
Oct 11 05:57:51 crc kubenswrapper[4651]: I1011 05:57:51.967492 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f5bda72-d019-4696-8ec0-992580274672-utilities\") pod \"redhat-operators-cszxx\" (UID: \"3f5bda72-d019-4696-8ec0-992580274672\") " pod="openshift-marketplace/redhat-operators-cszxx"
Oct 11 05:57:51 crc kubenswrapper[4651]: I1011 05:57:51.967725 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f5bda72-d019-4696-8ec0-992580274672-catalog-content\") pod \"redhat-operators-cszxx\" (UID: \"3f5bda72-d019-4696-8ec0-992580274672\") " pod="openshift-marketplace/redhat-operators-cszxx"
Oct 11 05:57:51 crc kubenswrapper[4651]: I1011 05:57:51.989535 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6gzt\" (UniqueName: \"kubernetes.io/projected/3f5bda72-d019-4696-8ec0-992580274672-kube-api-access-t6gzt\") pod \"redhat-operators-cszxx\" (UID: \"3f5bda72-d019-4696-8ec0-992580274672\") " pod="openshift-marketplace/redhat-operators-cszxx"
Oct 11 05:57:52 crc kubenswrapper[4651]: I1011 05:57:52.152360 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cszxx"
Oct 11 05:57:52 crc kubenswrapper[4651]: I1011 05:57:52.655009 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cszxx"]
Oct 11 05:57:53 crc kubenswrapper[4651]: I1011 05:57:53.092124 4651 generic.go:334] "Generic (PLEG): container finished" podID="3f5bda72-d019-4696-8ec0-992580274672" containerID="63924b3077443c7087f2e26bf0ae83a5bbbd878b7d7c288a8711392317a19e3a" exitCode=0
Oct 11 05:57:53 crc kubenswrapper[4651]: I1011 05:57:53.092546 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cszxx" event={"ID":"3f5bda72-d019-4696-8ec0-992580274672","Type":"ContainerDied","Data":"63924b3077443c7087f2e26bf0ae83a5bbbd878b7d7c288a8711392317a19e3a"}
Oct 11 05:57:53 crc kubenswrapper[4651]: I1011 05:57:53.092597 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cszxx" event={"ID":"3f5bda72-d019-4696-8ec0-992580274672","Type":"ContainerStarted","Data":"8d56b69593900d9dca6f5af5fca18200034e34c6683fd29b76225646ae21816d"}
Oct 11 05:57:54 crc kubenswrapper[4651]: I1011 05:57:54.113323 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cszxx" event={"ID":"3f5bda72-d019-4696-8ec0-992580274672","Type":"ContainerStarted","Data":"c59ee45ad4142d90c117b3c4f3ab200574b7976aacdea103b3a8fa15c918c734"}
Oct 11 05:57:55 crc kubenswrapper[4651]: I1011 05:57:55.123536 4651 generic.go:334] "Generic (PLEG): container finished" podID="3f5bda72-d019-4696-8ec0-992580274672" containerID="c59ee45ad4142d90c117b3c4f3ab200574b7976aacdea103b3a8fa15c918c734" exitCode=0
Oct 11 05:57:55 crc kubenswrapper[4651]: I1011 05:57:55.123935 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cszxx" event={"ID":"3f5bda72-d019-4696-8ec0-992580274672","Type":"ContainerDied","Data":"c59ee45ad4142d90c117b3c4f3ab200574b7976aacdea103b3a8fa15c918c734"}
Oct 11 05:57:56 crc kubenswrapper[4651]: I1011 05:57:56.024223 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_935c9395-17f6-4c8f-a08a-e3af25a75a9a/memcached/0.log"
Oct 11 05:57:56 crc kubenswrapper[4651]: I1011 05:57:56.133781 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cszxx" event={"ID":"3f5bda72-d019-4696-8ec0-992580274672","Type":"ContainerStarted","Data":"14efe80c08058ed57e6f28642704bbb671bcae0e072786971264ac2a09e3885d"}
Oct 11 05:57:56 crc kubenswrapper[4651]: I1011 05:57:56.166721 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-cszxx" podStartSLOduration=2.704747777 podStartE2EDuration="5.166694695s" podCreationTimestamp="2025-10-11 05:57:51 +0000 UTC" firstStartedPulling="2025-10-11 05:57:53.09403685 +0000 UTC m=+3993.990269646" lastFinishedPulling="2025-10-11 05:57:55.555983768 +0000 UTC m=+3996.452216564" observedRunningTime="2025-10-11 05:57:56.162986769 +0000 UTC m=+3997.059219575" watchObservedRunningTime="2025-10-11 05:57:56.166694695 +0000 UTC m=+3997.062927491"
Oct 11 05:58:02 crc kubenswrapper[4651]: I1011 05:58:02.153610 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-cszxx"
Oct 11 05:58:02 crc kubenswrapper[4651]: I1011 05:58:02.154633 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-cszxx"
Oct 11 05:58:02 crc kubenswrapper[4651]: I1011 05:58:02.266644 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-cszxx"
Oct 11 05:58:02 crc kubenswrapper[4651]: I1011 05:58:02.360431 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-cszxx"
Oct 11 05:58:02 crc kubenswrapper[4651]: I1011 05:58:02.508945 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cszxx"]
Oct 11 05:58:04 crc kubenswrapper[4651]: I1011 05:58:04.214804 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-cszxx" podUID="3f5bda72-d019-4696-8ec0-992580274672" containerName="registry-server" containerID="cri-o://14efe80c08058ed57e6f28642704bbb671bcae0e072786971264ac2a09e3885d" gracePeriod=2
Oct 11 05:58:06 crc kubenswrapper[4651]: I1011 05:58:06.153663 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cszxx"
Oct 11 05:58:06 crc kubenswrapper[4651]: I1011 05:58:06.232379 4651 generic.go:334] "Generic (PLEG): container finished" podID="3f5bda72-d019-4696-8ec0-992580274672" containerID="14efe80c08058ed57e6f28642704bbb671bcae0e072786971264ac2a09e3885d" exitCode=0
Oct 11 05:58:06 crc kubenswrapper[4651]: I1011 05:58:06.232438 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cszxx"
Oct 11 05:58:06 crc kubenswrapper[4651]: I1011 05:58:06.232486 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cszxx" event={"ID":"3f5bda72-d019-4696-8ec0-992580274672","Type":"ContainerDied","Data":"14efe80c08058ed57e6f28642704bbb671bcae0e072786971264ac2a09e3885d"}
Oct 11 05:58:06 crc kubenswrapper[4651]: I1011 05:58:06.232570 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cszxx" event={"ID":"3f5bda72-d019-4696-8ec0-992580274672","Type":"ContainerDied","Data":"8d56b69593900d9dca6f5af5fca18200034e34c6683fd29b76225646ae21816d"}
Oct 11 05:58:06 crc kubenswrapper[4651]: I1011 05:58:06.232604 4651 scope.go:117] "RemoveContainer" containerID="14efe80c08058ed57e6f28642704bbb671bcae0e072786971264ac2a09e3885d"
Oct 11 05:58:06 crc kubenswrapper[4651]: I1011 05:58:06.259252 4651 scope.go:117] "RemoveContainer" containerID="c59ee45ad4142d90c117b3c4f3ab200574b7976aacdea103b3a8fa15c918c734"
Oct 11 05:58:06 crc kubenswrapper[4651]: I1011 05:58:06.283443 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f5bda72-d019-4696-8ec0-992580274672-utilities\") pod \"3f5bda72-d019-4696-8ec0-992580274672\" (UID: \"3f5bda72-d019-4696-8ec0-992580274672\") "
Oct 11 05:58:06 crc kubenswrapper[4651]: I1011 05:58:06.283549 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t6gzt\" (UniqueName: \"kubernetes.io/projected/3f5bda72-d019-4696-8ec0-992580274672-kube-api-access-t6gzt\") pod \"3f5bda72-d019-4696-8ec0-992580274672\" (UID: \"3f5bda72-d019-4696-8ec0-992580274672\") "
Oct 11 05:58:06 crc kubenswrapper[4651]: I1011 05:58:06.283602 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f5bda72-d019-4696-8ec0-992580274672-catalog-content\") pod \"3f5bda72-d019-4696-8ec0-992580274672\" (UID: \"3f5bda72-d019-4696-8ec0-992580274672\") "
Oct 11 05:58:06 crc kubenswrapper[4651]: I1011 05:58:06.286325 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f5bda72-d019-4696-8ec0-992580274672-utilities" (OuterVolumeSpecName: "utilities") pod "3f5bda72-d019-4696-8ec0-992580274672" (UID: "3f5bda72-d019-4696-8ec0-992580274672"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 05:58:06 crc kubenswrapper[4651]: I1011 05:58:06.293657 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f5bda72-d019-4696-8ec0-992580274672-kube-api-access-t6gzt" (OuterVolumeSpecName: "kube-api-access-t6gzt") pod "3f5bda72-d019-4696-8ec0-992580274672" (UID: "3f5bda72-d019-4696-8ec0-992580274672"). InnerVolumeSpecName "kube-api-access-t6gzt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:58:06 crc kubenswrapper[4651]: I1011 05:58:06.298454 4651 scope.go:117] "RemoveContainer" containerID="63924b3077443c7087f2e26bf0ae83a5bbbd878b7d7c288a8711392317a19e3a"
Oct 11 05:58:06 crc kubenswrapper[4651]: I1011 05:58:06.384482 4651 scope.go:117] "RemoveContainer" containerID="14efe80c08058ed57e6f28642704bbb671bcae0e072786971264ac2a09e3885d"
Oct 11 05:58:06 crc kubenswrapper[4651]: I1011 05:58:06.386090 4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f5bda72-d019-4696-8ec0-992580274672-utilities\") on node \"crc\" DevicePath \"\""
Oct 11 05:58:06 crc kubenswrapper[4651]: I1011 05:58:06.386118 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t6gzt\" (UniqueName: \"kubernetes.io/projected/3f5bda72-d019-4696-8ec0-992580274672-kube-api-access-t6gzt\") on node \"crc\" DevicePath \"\""
Oct 11 05:58:06 crc kubenswrapper[4651]: E1011 05:58:06.386634 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14efe80c08058ed57e6f28642704bbb671bcae0e072786971264ac2a09e3885d\": container with ID starting with 14efe80c08058ed57e6f28642704bbb671bcae0e072786971264ac2a09e3885d not found: ID does not exist" containerID="14efe80c08058ed57e6f28642704bbb671bcae0e072786971264ac2a09e3885d"
Oct 11 05:58:06 crc kubenswrapper[4651]: I1011 05:58:06.386666 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14efe80c08058ed57e6f28642704bbb671bcae0e072786971264ac2a09e3885d"} err="failed to get container status \"14efe80c08058ed57e6f28642704bbb671bcae0e072786971264ac2a09e3885d\": rpc error: code = NotFound desc = could not find container \"14efe80c08058ed57e6f28642704bbb671bcae0e072786971264ac2a09e3885d\": container with ID starting with 14efe80c08058ed57e6f28642704bbb671bcae0e072786971264ac2a09e3885d not found: ID does not exist"
Oct 11 05:58:06 crc kubenswrapper[4651]: I1011 05:58:06.386696 4651 scope.go:117] "RemoveContainer" containerID="c59ee45ad4142d90c117b3c4f3ab200574b7976aacdea103b3a8fa15c918c734"
Oct 11 05:58:06 crc kubenswrapper[4651]: E1011 05:58:06.387102 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c59ee45ad4142d90c117b3c4f3ab200574b7976aacdea103b3a8fa15c918c734\": container with ID starting with c59ee45ad4142d90c117b3c4f3ab200574b7976aacdea103b3a8fa15c918c734 not found: ID does not exist" containerID="c59ee45ad4142d90c117b3c4f3ab200574b7976aacdea103b3a8fa15c918c734"
Oct 11 05:58:06 crc kubenswrapper[4651]: I1011 05:58:06.387143 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c59ee45ad4142d90c117b3c4f3ab200574b7976aacdea103b3a8fa15c918c734"} err="failed to get container status \"c59ee45ad4142d90c117b3c4f3ab200574b7976aacdea103b3a8fa15c918c734\": rpc error: code = NotFound desc = could not find container \"c59ee45ad4142d90c117b3c4f3ab200574b7976aacdea103b3a8fa15c918c734\": container with ID starting with c59ee45ad4142d90c117b3c4f3ab200574b7976aacdea103b3a8fa15c918c734 not found: ID does not exist"
Oct 11 05:58:06 crc kubenswrapper[4651]: I1011 05:58:06.387169 4651 scope.go:117] "RemoveContainer" containerID="63924b3077443c7087f2e26bf0ae83a5bbbd878b7d7c288a8711392317a19e3a"
Oct 11 05:58:06 crc kubenswrapper[4651]: E1011 05:58:06.387517 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"63924b3077443c7087f2e26bf0ae83a5bbbd878b7d7c288a8711392317a19e3a\": container with ID starting with 63924b3077443c7087f2e26bf0ae83a5bbbd878b7d7c288a8711392317a19e3a not found: ID does not exist" containerID="63924b3077443c7087f2e26bf0ae83a5bbbd878b7d7c288a8711392317a19e3a"
Oct 11 05:58:06 crc kubenswrapper[4651]: I1011 05:58:06.387545 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63924b3077443c7087f2e26bf0ae83a5bbbd878b7d7c288a8711392317a19e3a"} err="failed to get container status \"63924b3077443c7087f2e26bf0ae83a5bbbd878b7d7c288a8711392317a19e3a\": rpc error: code = NotFound desc = could not find container \"63924b3077443c7087f2e26bf0ae83a5bbbd878b7d7c288a8711392317a19e3a\": container with ID starting with 63924b3077443c7087f2e26bf0ae83a5bbbd878b7d7c288a8711392317a19e3a not found: ID does not exist"
Oct 11 05:58:06 crc kubenswrapper[4651]: I1011 05:58:06.395053 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f5bda72-d019-4696-8ec0-992580274672-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3f5bda72-d019-4696-8ec0-992580274672" (UID: "3f5bda72-d019-4696-8ec0-992580274672"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 05:58:06 crc kubenswrapper[4651]: I1011 05:58:06.487566 4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f5bda72-d019-4696-8ec0-992580274672-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 11 05:58:06 crc kubenswrapper[4651]: I1011 05:58:06.571806 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cszxx"]
Oct 11 05:58:06 crc kubenswrapper[4651]: I1011 05:58:06.580452 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-cszxx"]
Oct 11 05:58:07 crc kubenswrapper[4651]: I1011 05:58:07.882281 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f5bda72-d019-4696-8ec0-992580274672" path="/var/lib/kubelet/pods/3f5bda72-d019-4696-8ec0-992580274672/volumes"
Oct 11 05:58:14 crc kubenswrapper[4651]: I1011 05:58:14.390516 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4_7f8c3a2b-8cc3-4266-bdb2-e896769c8da4/util/0.log"
Oct 11 05:58:14 crc kubenswrapper[4651]: I1011 05:58:14.608944 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4_7f8c3a2b-8cc3-4266-bdb2-e896769c8da4/util/0.log"
Oct 11 05:58:14 crc kubenswrapper[4651]: I1011 05:58:14.644187 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4_7f8c3a2b-8cc3-4266-bdb2-e896769c8da4/pull/0.log"
Oct 11 05:58:14 crc kubenswrapper[4651]: I1011 05:58:14.657889 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4_7f8c3a2b-8cc3-4266-bdb2-e896769c8da4/pull/0.log"
Oct 11 05:58:14 crc kubenswrapper[4651]: I1011 05:58:14.823687 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4_7f8c3a2b-8cc3-4266-bdb2-e896769c8da4/extract/0.log"
Oct 11 05:58:14 crc kubenswrapper[4651]: I1011 05:58:14.826333 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4_7f8c3a2b-8cc3-4266-bdb2-e896769c8da4/util/0.log"
Oct 11 05:58:14 crc kubenswrapper[4651]: I1011 05:58:14.852198 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4354d16eef074529f4cafdce2fc6f7b790d4265439b68294d417143fbf94bw4_7f8c3a2b-8cc3-4266-bdb2-e896769c8da4/pull/0.log"
Oct 11 05:58:14 crc kubenswrapper[4651]: I1011 05:58:14.999811 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-64f84fcdbb-gbn6q_cca7099f-c5ef-4109-91f5-b6831d0771e8/kube-rbac-proxy/0.log"
Oct 11 05:58:15 crc kubenswrapper[4651]: I1011 05:58:15.077676 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-64f84fcdbb-gbn6q_cca7099f-c5ef-4109-91f5-b6831d0771e8/manager/0.log"
Oct 11 05:58:15 crc kubenswrapper[4651]: I1011 05:58:15.142810 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-59cdc64769-xtlln_b5b061bd-7d85-4960-a956-95c7911591a2/kube-rbac-proxy/0.log"
Oct 11 05:58:15 crc kubenswrapper[4651]: I1011 05:58:15.636131 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-687df44cdb-g8m5f_75692ce0-1ecb-4db6-a831-5740382b17e2/manager/0.log"
Oct 11 05:58:15 crc kubenswrapper[4651]: I1011 05:58:15.638198 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-687df44cdb-g8m5f_75692ce0-1ecb-4db6-a831-5740382b17e2/kube-rbac-proxy/0.log"
Oct 11 05:58:15 crc kubenswrapper[4651]: I1011 05:58:15.654011 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-59cdc64769-xtlln_b5b061bd-7d85-4960-a956-95c7911591a2/manager/0.log"
Oct 11 05:58:15 crc kubenswrapper[4651]: I1011 05:58:15.866881 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-7bb46cd7d-t88s8_99b28924-6f7a-4232-8fd5-b245178ce2ea/kube-rbac-proxy/0.log"
Oct 11 05:58:15 crc kubenswrapper[4651]: I1011 05:58:15.913795 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-7bb46cd7d-t88s8_99b28924-6f7a-4232-8fd5-b245178ce2ea/manager/0.log"
Oct 11 05:58:16 crc kubenswrapper[4651]: I1011 05:58:16.224855 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-6d9967f8dd-prncj_d725cd21-efdc-4182-be84-460db3042d11/kube-rbac-proxy/0.log"
Oct 11 05:58:16 crc kubenswrapper[4651]: I1011 05:58:16.265219 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-6d74794d9b-c9bzn_dd5122d9-c098-49cf-9723-bc0c31c6ce3b/kube-rbac-proxy/0.log"
Oct 11 05:58:16 crc kubenswrapper[4651]: I1011 05:58:16.307890 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-6d9967f8dd-prncj_d725cd21-efdc-4182-be84-460db3042d11/manager/0.log"
Oct 11 05:58:16 crc kubenswrapper[4651]: I1011 05:58:16.402751 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-6d74794d9b-c9bzn_dd5122d9-c098-49cf-9723-bc0c31c6ce3b/manager/0.log"
Oct 11 05:58:16 crc kubenswrapper[4651]: I1011 05:58:16.458009 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-8678f847b6-vpnkk_50a25e99-d2ec-4b16-a5fa-894e79ee528e/kube-rbac-proxy/0.log"
Oct 11 05:58:16 crc kubenswrapper[4651]: I1011 05:58:16.647371 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-8678f847b6-vpnkk_50a25e99-d2ec-4b16-a5fa-894e79ee528e/manager/0.log"
Oct 11 05:58:16 crc kubenswrapper[4651]: I1011 05:58:16.698169 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-74cb5cbc49-fwr7j_205c5753-c94e-4bf4-993f-36b798bb489d/manager/0.log"
Oct 11 05:58:16 crc kubenswrapper[4651]: I1011 05:58:16.711781 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-74cb5cbc49-fwr7j_205c5753-c94e-4bf4-993f-36b798bb489d/kube-rbac-proxy/0.log"
Oct 11 05:58:16 crc kubenswrapper[4651]: I1011 05:58:16.890048 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-ddb98f99b-989mj_b6b16d99-7f05-464a-a338-dcded4fa42fa/kube-rbac-proxy/0.log"
Oct 11 05:58:17 crc kubenswrapper[4651]: I1011 05:58:17.449016 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-ddb98f99b-989mj_b6b16d99-7f05-464a-a338-dcded4fa42fa/manager/0.log"
Oct 11 05:58:17 crc kubenswrapper[4651]: I1011 05:58:17.462852 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-59578bc799-9d9sm_5901a38b-902a-4822-8483-9d478e61aa40/kube-rbac-proxy/0.log"
Oct 11 05:58:17 crc kubenswrapper[4651]: I1011 05:58:17.490382 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-59578bc799-9d9sm_5901a38b-902a-4822-8483-9d478e61aa40/manager/0.log"
Oct 11 05:58:17 crc kubenswrapper[4651]: I1011 05:58:17.663477 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-5777b4f897-zdcb9_519c3c0c-07ee-4f48-ba92-d202190d9a49/manager/0.log"
Oct 11 05:58:17 crc kubenswrapper[4651]: I1011 05:58:17.690043 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-5777b4f897-zdcb9_519c3c0c-07ee-4f48-ba92-d202190d9a49/kube-rbac-proxy/0.log"
Oct 11 05:58:17 crc kubenswrapper[4651]: I1011 05:58:17.729057 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-797d478b46-xgxfd_fcc9418a-3e9d-4c74-849d-b9884077820c/kube-rbac-proxy/0.log"
Oct 11 05:58:17 crc kubenswrapper[4651]: I1011 05:58:17.790135 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-797d478b46-xgxfd_fcc9418a-3e9d-4c74-849d-b9884077820c/manager/0.log"
Oct 11 05:58:17 crc kubenswrapper[4651]: I1011 05:58:17.939715 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-57bb74c7bf-7mhch_321665c4-bc9a-47e0-a6c4-a54d56ad5ce8/kube-rbac-proxy/0.log"
Oct 11 05:58:17 crc kubenswrapper[4651]: I1011 05:58:17.975418 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-57bb74c7bf-7mhch_321665c4-bc9a-47e0-a6c4-a54d56ad5ce8/manager/0.log"
Oct 11 05:58:18 crc kubenswrapper[4651]: I1011 05:58:18.002209 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-6d7c7ddf95-27r6g_4aede6f1-d9d0-4c62-b118-7c93fa2af789/kube-rbac-proxy/0.log"
Oct 11 05:58:18 crc kubenswrapper[4651]: I1011 05:58:18.085791 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-6d7c7ddf95-27r6g_4aede6f1-d9d0-4c62-b118-7c93fa2af789/manager/0.log"
Oct 11 05:58:18 crc kubenswrapper[4651]: I1011 05:58:18.154871 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6cc7fb757dxbxc6_9105b503-cbba-48d1-acdb-ac21b7c791b4/kube-rbac-proxy/0.log"
Oct 11 05:58:18 crc kubenswrapper[4651]: I1011 05:58:18.161245 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6cc7fb757dxbxc6_9105b503-cbba-48d1-acdb-ac21b7c791b4/manager/0.log"
Oct 11 05:58:18 crc kubenswrapper[4651]: I1011 05:58:18.304752 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-776b77588d-7z9rb_7bcc02c2-c9c4-498e-8f95-ace0d1b98899/kube-rbac-proxy/0.log"
Oct 11 05:58:18 crc kubenswrapper[4651]: I1011 05:58:18.394496 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-69c9cf8694-vccpk_25b74915-17cd-4558-9801-5a0d5113b578/kube-rbac-proxy/0.log"
Oct 11 05:58:18 crc kubenswrapper[4651]: I1011 05:58:18.509722 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-69c9cf8694-vccpk_25b74915-17cd-4558-9801-5a0d5113b578/operator/0.log"
Oct 11 05:58:18 crc kubenswrapper[4651]: I1011 05:58:18.610045 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-q6f6m_69aec95d-9651-4e5f-9cb6-a6ca9d5093f5/registry-server/0.log"
Oct 11 05:58:18 crc kubenswrapper[4651]: I1011 05:58:18.758983 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-869cc7797f-6m2qz_c9dabf8d-2991-4af0-99c8-084e157e9b52/kube-rbac-proxy/0.log"
Oct 11 05:58:18 crc kubenswrapper[4651]: I1011 05:58:18.886905 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-664664cb68-5s2zz_160294e4-b990-41b3-8f6c-22102366d72c/kube-rbac-proxy/0.log"
Oct 11 05:58:18 crc kubenswrapper[4651]: I1011 05:58:18.974263 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-869cc7797f-6m2qz_c9dabf8d-2991-4af0-99c8-084e157e9b52/manager/0.log"
Oct 11 05:58:19 crc kubenswrapper[4651]: I1011 05:58:19.048245 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-664664cb68-5s2zz_160294e4-b990-41b3-8f6c-22102366d72c/manager/0.log"
Oct 11 05:58:19 crc kubenswrapper[4651]: I1011 05:58:19.075494 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-dfqdz_bd36db45-dfcf-4d27-8bfb-fcefeff7f0ba/operator/0.log"
Oct 11 05:58:19 crc kubenswrapper[4651]: I1011 05:58:19.249625 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f4d5dfdc6-vmqd8_f4175bba-9aae-4faf-8670-f612f867827e/kube-rbac-proxy/0.log"
Oct 11 05:58:19 crc kubenswrapper[4651]: I1011 05:58:19.265057 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-776b77588d-7z9rb_7bcc02c2-c9c4-498e-8f95-ace0d1b98899/manager/0.log"
Oct 11 05:58:19 crc kubenswrapper[4651]: I1011 05:58:19.292135 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f4d5dfdc6-vmqd8_f4175bba-9aae-4faf-8670-f612f867827e/manager/0.log"
Oct 11 05:58:19 crc kubenswrapper[4651]: I1011 05:58:19.402912 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-578874c84d-x9x8w_555de60b-f68b-42a6-a662-d1e5202a30c5/kube-rbac-proxy/0.log"
Oct 11 05:58:19 crc kubenswrapper[4651]: I1011 05:58:19.532423 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-578874c84d-x9x8w_555de60b-f68b-42a6-a662-d1e5202a30c5/manager/0.log"
Oct 11 05:58:19 crc kubenswrapper[4651]: I1011 05:58:19.538186 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-ffcdd6c94-p9d4d_0e634116-91fc-4fad-b906-a998e77ea3e4/kube-rbac-proxy/0.log"
Oct 11 05:58:19 crc kubenswrapper[4651]: I1011 05:58:19.550838 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-ffcdd6c94-p9d4d_0e634116-91fc-4fad-b906-a998e77ea3e4/manager/0.log"
Oct 11 05:58:19 crc kubenswrapper[4651]: I1011 05:58:19.644030 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-646675d848-7w8pk_9bfc9d92-8b6d-4b13-9759-b7185e1f16bb/kube-rbac-proxy/0.log"
Oct 11 05:58:19 crc kubenswrapper[4651]: I1011 05:58:19.710151 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-646675d848-7w8pk_9bfc9d92-8b6d-4b13-9759-b7185e1f16bb/manager/0.log"
Oct 11 05:58:36 crc kubenswrapper[4651]: I1011 05:58:36.418275 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-8wltd"]
Oct 11 05:58:36 crc kubenswrapper[4651]: E1011 05:58:36.419738 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f5bda72-d019-4696-8ec0-992580274672" containerName="extract-utilities"
Oct 11 05:58:36 crc kubenswrapper[4651]: I1011 05:58:36.419761 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f5bda72-d019-4696-8ec0-992580274672" containerName="extract-utilities"
Oct 11 05:58:36 crc kubenswrapper[4651]: E1011 05:58:36.419808 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f5bda72-d019-4696-8ec0-992580274672" containerName="registry-server"
Oct 11 05:58:36 crc kubenswrapper[4651]: I1011 05:58:36.419844 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f5bda72-d019-4696-8ec0-992580274672" containerName="registry-server"
Oct 11 05:58:36 crc kubenswrapper[4651]: E1011 05:58:36.419868 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f5bda72-d019-4696-8ec0-992580274672" containerName="extract-content"
Oct 11 05:58:36 crc kubenswrapper[4651]: I1011 05:58:36.419880 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f5bda72-d019-4696-8ec0-992580274672" containerName="extract-content"
Oct 11 05:58:36 crc kubenswrapper[4651]: I1011 05:58:36.420265 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f5bda72-d019-4696-8ec0-992580274672" containerName="registry-server"
Oct 11 05:58:36 crc kubenswrapper[4651]: I1011 05:58:36.422847 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8wltd"
Oct 11 05:58:36 crc kubenswrapper[4651]: I1011 05:58:36.449737 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8wltd"]
Oct 11 05:58:36 crc kubenswrapper[4651]: I1011 05:58:36.513103 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cd8a09c-c03e-446e-95d2-f5fbecd8abd0-catalog-content\") pod \"community-operators-8wltd\" (UID: \"8cd8a09c-c03e-446e-95d2-f5fbecd8abd0\") " pod="openshift-marketplace/community-operators-8wltd"
Oct 11 05:58:36 crc kubenswrapper[4651]: I1011 05:58:36.513385 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvmdn\" (UniqueName: \"kubernetes.io/projected/8cd8a09c-c03e-446e-95d2-f5fbecd8abd0-kube-api-access-kvmdn\") pod \"community-operators-8wltd\" (UID: \"8cd8a09c-c03e-446e-95d2-f5fbecd8abd0\") " pod="openshift-marketplace/community-operators-8wltd"
Oct 11 05:58:36 crc kubenswrapper[4651]: I1011 05:58:36.513464 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cd8a09c-c03e-446e-95d2-f5fbecd8abd0-utilities\") pod \"community-operators-8wltd\" (UID: \"8cd8a09c-c03e-446e-95d2-f5fbecd8abd0\") " pod="openshift-marketplace/community-operators-8wltd"
Oct 11 05:58:36 crc kubenswrapper[4651]: I1011 05:58:36.615690 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cd8a09c-c03e-446e-95d2-f5fbecd8abd0-catalog-content\") pod \"community-operators-8wltd\" (UID: \"8cd8a09c-c03e-446e-95d2-f5fbecd8abd0\") " pod="openshift-marketplace/community-operators-8wltd"
Oct 11 05:58:36 crc kubenswrapper[4651]: I1011 05:58:36.615759 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvmdn\" (UniqueName: \"kubernetes.io/projected/8cd8a09c-c03e-446e-95d2-f5fbecd8abd0-kube-api-access-kvmdn\") pod \"community-operators-8wltd\" (UID: \"8cd8a09c-c03e-446e-95d2-f5fbecd8abd0\") " pod="openshift-marketplace/community-operators-8wltd"
Oct 11 05:58:36 crc kubenswrapper[4651]: I1011 05:58:36.615776 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cd8a09c-c03e-446e-95d2-f5fbecd8abd0-utilities\") pod \"community-operators-8wltd\" (UID: \"8cd8a09c-c03e-446e-95d2-f5fbecd8abd0\") " pod="openshift-marketplace/community-operators-8wltd"
Oct 11 05:58:36 crc kubenswrapper[4651]: I1011 05:58:36.616355 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cd8a09c-c03e-446e-95d2-f5fbecd8abd0-utilities\") pod \"community-operators-8wltd\" (UID: \"8cd8a09c-c03e-446e-95d2-f5fbecd8abd0\") " pod="openshift-marketplace/community-operators-8wltd"
Oct 11 05:58:36 crc kubenswrapper[4651]: I1011 05:58:36.616567 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cd8a09c-c03e-446e-95d2-f5fbecd8abd0-catalog-content\") pod \"community-operators-8wltd\" (UID: \"8cd8a09c-c03e-446e-95d2-f5fbecd8abd0\") " pod="openshift-marketplace/community-operators-8wltd"
Oct 11 05:58:36 crc kubenswrapper[4651]: I1011 05:58:36.640099 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvmdn\" (UniqueName: \"kubernetes.io/projected/8cd8a09c-c03e-446e-95d2-f5fbecd8abd0-kube-api-access-kvmdn\") pod \"community-operators-8wltd\" (UID: \"8cd8a09c-c03e-446e-95d2-f5fbecd8abd0\") " pod="openshift-marketplace/community-operators-8wltd"
Oct 11 05:58:36 crc kubenswrapper[4651]: I1011 05:58:36.757379 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8wltd"
Oct 11 05:58:37 crc kubenswrapper[4651]: I1011 05:58:37.287773 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8wltd"]
Oct 11 05:58:37 crc kubenswrapper[4651]: I1011 05:58:37.536124 4651 generic.go:334] "Generic (PLEG): container finished" podID="8cd8a09c-c03e-446e-95d2-f5fbecd8abd0" containerID="79d3d68f1f5c129634fbb90ff2757b7448d43d27b397cd5b335ada643ae16488" exitCode=0
Oct 11 05:58:37 crc kubenswrapper[4651]: I1011 05:58:37.536163 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8wltd" event={"ID":"8cd8a09c-c03e-446e-95d2-f5fbecd8abd0","Type":"ContainerDied","Data":"79d3d68f1f5c129634fbb90ff2757b7448d43d27b397cd5b335ada643ae16488"}
Oct 11 05:58:37 crc kubenswrapper[4651]: I1011 05:58:37.537859 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8wltd" event={"ID":"8cd8a09c-c03e-446e-95d2-f5fbecd8abd0","Type":"ContainerStarted","Data":"3a67bf6d1bfcf6b9789f65fec23cf367c2d34d38c5b7177380dbabced67abbe4"}
Oct 11 05:58:38 crc kubenswrapper[4651]: I1011 05:58:38.547348 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8wltd" event={"ID":"8cd8a09c-c03e-446e-95d2-f5fbecd8abd0","Type":"ContainerStarted","Data":"f95692a95b9a8806d60048e01f96548e94c3127d4e8084f71d3bfe97ac9d737f"}
Oct 11 05:58:38 crc kubenswrapper[4651]: I1011 05:58:38.935110 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-27wgh_f8fd1293-3d68-4dd2-bc12-8f7c02017bcd/control-plane-machine-set-operator/0.log"
Oct 11 05:58:39 crc kubenswrapper[4651]: I1011 05:58:39.147663 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-2mj56_b590e4d5-1684-4e2f-b5e9-8fbf00db4546/kube-rbac-proxy/0.log"
Oct 11 05:58:39 crc kubenswrapper[4651]: I1011 05:58:39.192948 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-2mj56_b590e4d5-1684-4e2f-b5e9-8fbf00db4546/machine-api-operator/0.log"
Oct 11 05:58:40 crc kubenswrapper[4651]: I1011 05:58:40.562635 4651 generic.go:334] "Generic (PLEG): container finished" podID="8cd8a09c-c03e-446e-95d2-f5fbecd8abd0" containerID="f95692a95b9a8806d60048e01f96548e94c3127d4e8084f71d3bfe97ac9d737f" exitCode=0
Oct 11 05:58:40 crc kubenswrapper[4651]: I1011 05:58:40.562710 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8wltd" event={"ID":"8cd8a09c-c03e-446e-95d2-f5fbecd8abd0","Type":"ContainerDied","Data":"f95692a95b9a8806d60048e01f96548e94c3127d4e8084f71d3bfe97ac9d737f"}
Oct 11 05:58:41 crc kubenswrapper[4651]: I1011 05:58:41.575059 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8wltd" event={"ID":"8cd8a09c-c03e-446e-95d2-f5fbecd8abd0","Type":"ContainerStarted","Data":"362d43702b11495619308c2b49b246466b98ff9a6c107f2650e95c10f87c586e"}
Oct 11 05:58:41 crc kubenswrapper[4651]: I1011 05:58:41.595608 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-8wltd" podStartSLOduration=2.044834087 podStartE2EDuration="5.595588967s" podCreationTimestamp="2025-10-11 05:58:36 +0000 UTC" firstStartedPulling="2025-10-11 05:58:37.538555545 +0000 UTC m=+4038.434788361" lastFinishedPulling="2025-10-11 05:58:41.089310445 +0000 UTC m=+4041.985543241" observedRunningTime="2025-10-11 05:58:41.593100763 +0000 UTC m=+4042.489333589" watchObservedRunningTime="2025-10-11 05:58:41.595588967 +0000 UTC m=+4042.491821773"
Oct 11 05:58:46 crc kubenswrapper[4651]: I1011 05:58:46.758248 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-8wltd"
Oct 11 05:58:46 crc kubenswrapper[4651]: I1011 05:58:46.758719 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-8wltd"
Oct 11 05:58:46 crc kubenswrapper[4651]: I1011 05:58:46.834563 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-8wltd"
Oct 11 05:58:47 crc kubenswrapper[4651]: I1011 05:58:47.680548 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-8wltd"
Oct 11 05:58:47 crc kubenswrapper[4651]: I1011 05:58:47.737525 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8wltd"]
Oct 11 05:58:49 crc kubenswrapper[4651]: I1011 05:58:49.648505 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-8wltd" podUID="8cd8a09c-c03e-446e-95d2-f5fbecd8abd0" containerName="registry-server" containerID="cri-o://362d43702b11495619308c2b49b246466b98ff9a6c107f2650e95c10f87c586e" gracePeriod=2
Oct 11 05:58:50 crc kubenswrapper[4651]: I1011 05:58:50.669528 4651 generic.go:334] "Generic (PLEG): container finished" podID="8cd8a09c-c03e-446e-95d2-f5fbecd8abd0" containerID="362d43702b11495619308c2b49b246466b98ff9a6c107f2650e95c10f87c586e" exitCode=0
Oct 11 05:58:50 crc kubenswrapper[4651]: I1011 05:58:50.669613 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8wltd" event={"ID":"8cd8a09c-c03e-446e-95d2-f5fbecd8abd0","Type":"ContainerDied","Data":"362d43702b11495619308c2b49b246466b98ff9a6c107f2650e95c10f87c586e"}
Oct 11 05:58:50 crc kubenswrapper[4651]: I1011 05:58:50.929604 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8wltd"
Oct 11 05:58:51 crc kubenswrapper[4651]: I1011 05:58:51.040654 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cd8a09c-c03e-446e-95d2-f5fbecd8abd0-catalog-content\") pod \"8cd8a09c-c03e-446e-95d2-f5fbecd8abd0\" (UID: \"8cd8a09c-c03e-446e-95d2-f5fbecd8abd0\") "
Oct 11 05:58:51 crc kubenswrapper[4651]: I1011 05:58:51.040900 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cd8a09c-c03e-446e-95d2-f5fbecd8abd0-utilities\") pod \"8cd8a09c-c03e-446e-95d2-f5fbecd8abd0\" (UID: \"8cd8a09c-c03e-446e-95d2-f5fbecd8abd0\") "
Oct 11 05:58:51 crc kubenswrapper[4651]: I1011 05:58:51.040928 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvmdn\" (UniqueName: \"kubernetes.io/projected/8cd8a09c-c03e-446e-95d2-f5fbecd8abd0-kube-api-access-kvmdn\") pod \"8cd8a09c-c03e-446e-95d2-f5fbecd8abd0\" (UID: \"8cd8a09c-c03e-446e-95d2-f5fbecd8abd0\") "
Oct 11 05:58:51 crc kubenswrapper[4651]: I1011 05:58:51.041812 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8cd8a09c-c03e-446e-95d2-f5fbecd8abd0-utilities" (OuterVolumeSpecName: "utilities") pod "8cd8a09c-c03e-446e-95d2-f5fbecd8abd0" (UID: "8cd8a09c-c03e-446e-95d2-f5fbecd8abd0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 05:58:51 crc kubenswrapper[4651]: I1011 05:58:51.049579 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cd8a09c-c03e-446e-95d2-f5fbecd8abd0-kube-api-access-kvmdn" (OuterVolumeSpecName: "kube-api-access-kvmdn") pod "8cd8a09c-c03e-446e-95d2-f5fbecd8abd0" (UID: "8cd8a09c-c03e-446e-95d2-f5fbecd8abd0"). InnerVolumeSpecName "kube-api-access-kvmdn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 05:58:51 crc kubenswrapper[4651]: I1011 05:58:51.101626 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8cd8a09c-c03e-446e-95d2-f5fbecd8abd0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8cd8a09c-c03e-446e-95d2-f5fbecd8abd0" (UID: "8cd8a09c-c03e-446e-95d2-f5fbecd8abd0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 05:58:51 crc kubenswrapper[4651]: I1011 05:58:51.143069 4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cd8a09c-c03e-446e-95d2-f5fbecd8abd0-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 11 05:58:51 crc kubenswrapper[4651]: I1011 05:58:51.143102 4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cd8a09c-c03e-446e-95d2-f5fbecd8abd0-utilities\") on node \"crc\" DevicePath \"\""
Oct 11 05:58:51 crc kubenswrapper[4651]: I1011 05:58:51.143112 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvmdn\" (UniqueName: \"kubernetes.io/projected/8cd8a09c-c03e-446e-95d2-f5fbecd8abd0-kube-api-access-kvmdn\") on node \"crc\" DevicePath \"\""
Oct 11 05:58:51 crc kubenswrapper[4651]: I1011 05:58:51.682963 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8wltd" event={"ID":"8cd8a09c-c03e-446e-95d2-f5fbecd8abd0","Type":"ContainerDied","Data":"3a67bf6d1bfcf6b9789f65fec23cf367c2d34d38c5b7177380dbabced67abbe4"}
Oct 11 05:58:51 crc kubenswrapper[4651]: I1011 05:58:51.683009 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8wltd"
Oct 11 05:58:51 crc kubenswrapper[4651]: I1011 05:58:51.683534 4651 scope.go:117] "RemoveContainer" containerID="362d43702b11495619308c2b49b246466b98ff9a6c107f2650e95c10f87c586e"
Oct 11 05:58:51 crc kubenswrapper[4651]: I1011 05:58:51.711643 4651 scope.go:117] "RemoveContainer" containerID="f95692a95b9a8806d60048e01f96548e94c3127d4e8084f71d3bfe97ac9d737f"
Oct 11 05:58:51 crc kubenswrapper[4651]: I1011 05:58:51.744228 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8wltd"]
Oct 11 05:58:51 crc kubenswrapper[4651]: I1011 05:58:51.755893 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-8wltd"]
Oct 11 05:58:51 crc kubenswrapper[4651]: I1011 05:58:51.762192 4651 scope.go:117] "RemoveContainer" containerID="79d3d68f1f5c129634fbb90ff2757b7448d43d27b397cd5b335ada643ae16488"
Oct 11 05:58:51 crc kubenswrapper[4651]: I1011 05:58:51.882649 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cd8a09c-c03e-446e-95d2-f5fbecd8abd0" path="/var/lib/kubelet/pods/8cd8a09c-c03e-446e-95d2-f5fbecd8abd0/volumes"
Oct 11 05:58:52 crc kubenswrapper[4651]: I1011 05:58:52.300196 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-pmm46_d483b46d-edb9-4b36-b0a4-3c959e0f6aca/cert-manager-controller/0.log"
Oct 11 05:58:52 crc kubenswrapper[4651]: I1011 05:58:52.684199 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-5t4wt_4045e2c1-af15-42ee-bfee-f72d32924237/cert-manager-cainjector/0.log"
Oct 11 05:58:52 crc kubenswrapper[4651]: I1011 05:58:52.737238 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-l859c_54d95539-bc93-45d6-a26a-95284f123cde/cert-manager-webhook/0.log"
Oct 11 05:59:05 crc kubenswrapper[4651]: I1011 05:59:05.555474 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6b874cbd85-zwhxz_03a76fa3-7b5c-4b1c-9c67-d54f0e448c2d/nmstate-console-plugin/0.log"
Oct 11 05:59:05 crc kubenswrapper[4651]: I1011
05:59:05.702591 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-qpgc7_50f6467a-ad8d-4828-81ba-b944dccc4be7/nmstate-handler/0.log" Oct 11 05:59:05 crc kubenswrapper[4651]: I1011 05:59:05.730230 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-gg29l_298ed4c9-1190-4617-a3c7-147f15e1fea3/kube-rbac-proxy/0.log" Oct 11 05:59:05 crc kubenswrapper[4651]: I1011 05:59:05.799725 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-gg29l_298ed4c9-1190-4617-a3c7-147f15e1fea3/nmstate-metrics/0.log" Oct 11 05:59:05 crc kubenswrapper[4651]: I1011 05:59:05.912656 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-858ddd8f98-p95g9_c7c3404e-ed5a-48d9-b525-7451514c9a5c/nmstate-operator/0.log" Oct 11 05:59:05 crc kubenswrapper[4651]: I1011 05:59:05.952213 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6cdbc54649-dlvdg_85293b68-d608-466d-9aa5-2b5eae8edc74/nmstate-webhook/0.log" Oct 11 05:59:16 crc kubenswrapper[4651]: I1011 05:59:16.311259 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 05:59:16 crc kubenswrapper[4651]: I1011 05:59:16.312244 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 05:59:20 crc kubenswrapper[4651]: I1011 05:59:20.198305 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-gbzvh_b23b74fb-01aa-4027-978b-ef5fccb6a023/kube-rbac-proxy/0.log" Oct 11 05:59:20 crc kubenswrapper[4651]: I1011 05:59:20.349593 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-gbzvh_b23b74fb-01aa-4027-978b-ef5fccb6a023/controller/0.log" Oct 11 05:59:20 crc kubenswrapper[4651]: I1011 05:59:20.471126 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-64bf5d555-sxrwh_4a2881f0-834f-4e9d-8be5-5adb1f5feefd/frr-k8s-webhook-server/0.log" Oct 11 05:59:20 crc kubenswrapper[4651]: I1011 05:59:20.538438 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/cp-frr-files/0.log" Oct 11 05:59:20 crc kubenswrapper[4651]: I1011 05:59:20.731993 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/cp-frr-files/0.log" Oct 11 05:59:20 crc kubenswrapper[4651]: I1011 05:59:20.753893 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/cp-reloader/0.log" Oct 11 05:59:20 crc kubenswrapper[4651]: I1011 05:59:20.783259 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/cp-metrics/0.log" Oct 11 05:59:20 crc kubenswrapper[4651]: I1011 05:59:20.813292 4651 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/cp-reloader/0.log" Oct 11 05:59:20 crc kubenswrapper[4651]: I1011 05:59:20.981703 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/cp-frr-files/0.log" Oct 11 05:59:20 crc kubenswrapper[4651]: I1011 05:59:20.986262 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/cp-metrics/0.log" Oct 11 05:59:21 crc kubenswrapper[4651]: I1011 05:59:21.011517 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/cp-reloader/0.log" Oct 11 05:59:21 crc kubenswrapper[4651]: I1011 05:59:21.012481 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/cp-metrics/0.log" Oct 11 05:59:21 crc kubenswrapper[4651]: I1011 05:59:21.202186 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/cp-metrics/0.log" Oct 11 05:59:21 crc kubenswrapper[4651]: I1011 05:59:21.205963 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/cp-frr-files/0.log" Oct 11 05:59:21 crc kubenswrapper[4651]: I1011 05:59:21.210010 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/cp-reloader/0.log" Oct 11 05:59:21 crc kubenswrapper[4651]: I1011 05:59:21.241568 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/controller/0.log" Oct 11 05:59:21 crc kubenswrapper[4651]: I1011 05:59:21.362673 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/frr-metrics/0.log" Oct 11 05:59:21 crc kubenswrapper[4651]: I1011 05:59:21.363048 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/kube-rbac-proxy/0.log" Oct 11 05:59:21 crc kubenswrapper[4651]: I1011 05:59:21.424137 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/kube-rbac-proxy-frr/0.log" Oct 11 05:59:21 crc kubenswrapper[4651]: I1011 05:59:21.531799 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/reloader/0.log" Oct 11 05:59:21 crc kubenswrapper[4651]: I1011 05:59:21.599743 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-64d4c8dfd9-ssxfc_176a1bb0-149f-47fd-b9ba-d3249b405fa1/manager/0.log" Oct 11 05:59:21 crc kubenswrapper[4651]: I1011 05:59:21.770117 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6ddcb68dc7-mhfj6_3da924ac-510e-4c75-8e5d-2571c454a7a5/webhook-server/0.log" Oct 11 05:59:21 crc kubenswrapper[4651]: I1011 05:59:21.951477 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-j7mlx_a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6/kube-rbac-proxy/0.log" Oct 11 05:59:22 crc kubenswrapper[4651]: I1011 05:59:22.421753 4651 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_speaker-j7mlx_a8bd8e31-66a3-492c-8cc4-be80c4b0dcb6/speaker/0.log" Oct 11 05:59:22 crc kubenswrapper[4651]: I1011 05:59:22.747749 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-x9n5l_870fb108-5a7a-4ade-82fd-8cf3c09950b8/frr/0.log" Oct 11 05:59:36 crc kubenswrapper[4651]: I1011 05:59:36.551466 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d_080b871c-11bf-4ef1-b785-9058524b6c82/util/0.log" Oct 11 05:59:36 crc kubenswrapper[4651]: I1011 05:59:36.707518 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d_080b871c-11bf-4ef1-b785-9058524b6c82/pull/0.log" Oct 11 05:59:36 crc kubenswrapper[4651]: I1011 05:59:36.748500 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d_080b871c-11bf-4ef1-b785-9058524b6c82/pull/0.log" Oct 11 05:59:36 crc kubenswrapper[4651]: I1011 05:59:36.748906 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d_080b871c-11bf-4ef1-b785-9058524b6c82/util/0.log" Oct 11 05:59:36 crc kubenswrapper[4651]: I1011 05:59:36.979219 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d_080b871c-11bf-4ef1-b785-9058524b6c82/util/0.log" Oct 11 05:59:37 crc kubenswrapper[4651]: I1011 05:59:37.008560 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d_080b871c-11bf-4ef1-b785-9058524b6c82/extract/0.log" Oct 11 05:59:37 crc kubenswrapper[4651]: I1011 05:59:37.055972 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2xfp9d_080b871c-11bf-4ef1-b785-9058524b6c82/pull/0.log" Oct 11 05:59:37 crc kubenswrapper[4651]: I1011 05:59:37.126191 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-bmwjb_d813ef9d-786d-41a3-8170-90be0cf304bf/extract-utilities/0.log" Oct 11 05:59:37 crc kubenswrapper[4651]: I1011 05:59:37.347688 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-bmwjb_d813ef9d-786d-41a3-8170-90be0cf304bf/extract-utilities/0.log" Oct 11 05:59:37 crc kubenswrapper[4651]: I1011 05:59:37.353112 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-bmwjb_d813ef9d-786d-41a3-8170-90be0cf304bf/extract-content/0.log" Oct 11 05:59:37 crc kubenswrapper[4651]: I1011 05:59:37.355444 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-bmwjb_d813ef9d-786d-41a3-8170-90be0cf304bf/extract-content/0.log" Oct 11 05:59:37 crc kubenswrapper[4651]: I1011 05:59:37.535715 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-bmwjb_d813ef9d-786d-41a3-8170-90be0cf304bf/extract-utilities/0.log" Oct 11 05:59:37 crc kubenswrapper[4651]: I1011 05:59:37.537660 4651 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-bmwjb_d813ef9d-786d-41a3-8170-90be0cf304bf/extract-content/0.log" Oct 11 05:59:37 crc kubenswrapper[4651]: I1011 05:59:37.766972 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vs2v6_819c85ef-b451-47ed-88f6-1790f362d446/extract-utilities/0.log" Oct 11 05:59:37 crc kubenswrapper[4651]: I1011 05:59:37.968951 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-bmwjb_d813ef9d-786d-41a3-8170-90be0cf304bf/registry-server/0.log" Oct 11 05:59:38 crc kubenswrapper[4651]: I1011 05:59:38.014238 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vs2v6_819c85ef-b451-47ed-88f6-1790f362d446/extract-content/0.log" Oct 11 05:59:38 crc kubenswrapper[4651]: I1011 05:59:38.063386 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vs2v6_819c85ef-b451-47ed-88f6-1790f362d446/extract-utilities/0.log" Oct 11 05:59:38 crc kubenswrapper[4651]: I1011 05:59:38.069289 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vs2v6_819c85ef-b451-47ed-88f6-1790f362d446/extract-content/0.log" Oct 11 05:59:38 crc kubenswrapper[4651]: I1011 05:59:38.913306 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vs2v6_819c85ef-b451-47ed-88f6-1790f362d446/extract-content/0.log" Oct 11 05:59:38 crc kubenswrapper[4651]: I1011 05:59:38.921218 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vs2v6_819c85ef-b451-47ed-88f6-1790f362d446/extract-utilities/0.log" Oct 11 05:59:39 crc kubenswrapper[4651]: I1011 05:59:39.176629 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm_f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c/util/0.log" Oct 11 05:59:39 crc kubenswrapper[4651]: I1011 05:59:39.245354 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vs2v6_819c85ef-b451-47ed-88f6-1790f362d446/registry-server/0.log" Oct 11 05:59:39 crc kubenswrapper[4651]: I1011 05:59:39.357777 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm_f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c/pull/0.log" Oct 11 05:59:39 crc kubenswrapper[4651]: I1011 05:59:39.357958 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm_f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c/util/0.log" Oct 11 05:59:39 crc kubenswrapper[4651]: I1011 05:59:39.404073 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm_f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c/pull/0.log" Oct 11 05:59:39 crc kubenswrapper[4651]: I1011 05:59:39.557242 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm_f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c/util/0.log" Oct 11 05:59:39 crc kubenswrapper[4651]: I1011 05:59:39.595247 4651 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm_f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c/extract/0.log" Oct 11 05:59:39 crc kubenswrapper[4651]: I1011 05:59:39.609633 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c729hm_f4b8564c-1ee2-4b5b-8d7a-aa95cda9486c/pull/0.log" Oct 11 05:59:39 crc kubenswrapper[4651]: I1011 05:59:39.744538 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-c7pll_112a830a-ce46-4e30-8d29-10f0605944d9/marketplace-operator/0.log" Oct 11 05:59:39 crc kubenswrapper[4651]: I1011 05:59:39.803788 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rvmdl_0591b55e-2399-450c-9738-6160d1d25ee1/extract-utilities/0.log" Oct 11 05:59:40 crc kubenswrapper[4651]: I1011 05:59:40.006748 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rvmdl_0591b55e-2399-450c-9738-6160d1d25ee1/extract-content/0.log" Oct 11 05:59:40 crc kubenswrapper[4651]: I1011 05:59:40.024754 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rvmdl_0591b55e-2399-450c-9738-6160d1d25ee1/extract-content/0.log" Oct 11 05:59:40 crc kubenswrapper[4651]: I1011 05:59:40.074102 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rvmdl_0591b55e-2399-450c-9738-6160d1d25ee1/extract-utilities/0.log" Oct 11 05:59:40 crc kubenswrapper[4651]: I1011 05:59:40.197842 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rvmdl_0591b55e-2399-450c-9738-6160d1d25ee1/extract-utilities/0.log" Oct 11 05:59:40 crc kubenswrapper[4651]: I1011 05:59:40.197957 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rvmdl_0591b55e-2399-450c-9738-6160d1d25ee1/extract-content/0.log" Oct 11 05:59:40 crc kubenswrapper[4651]: I1011 05:59:40.322462 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-strx6_60df6e10-9dc8-478b-a424-a86b47a1ba0a/extract-utilities/0.log" Oct 11 05:59:40 crc kubenswrapper[4651]: I1011 05:59:40.437271 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rvmdl_0591b55e-2399-450c-9738-6160d1d25ee1/registry-server/0.log" Oct 11 05:59:41 crc kubenswrapper[4651]: I1011 05:59:41.233886 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-strx6_60df6e10-9dc8-478b-a424-a86b47a1ba0a/extract-utilities/0.log" Oct 11 05:59:41 crc kubenswrapper[4651]: I1011 05:59:41.272906 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-strx6_60df6e10-9dc8-478b-a424-a86b47a1ba0a/extract-content/0.log" Oct 11 05:59:41 crc kubenswrapper[4651]: I1011 05:59:41.273039 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-strx6_60df6e10-9dc8-478b-a424-a86b47a1ba0a/extract-content/0.log" Oct 11 05:59:41 crc kubenswrapper[4651]: I1011 05:59:41.465161 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-strx6_60df6e10-9dc8-478b-a424-a86b47a1ba0a/extract-content/0.log" Oct 11 05:59:41 crc kubenswrapper[4651]: I1011 05:59:41.465765 4651 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-strx6_60df6e10-9dc8-478b-a424-a86b47a1ba0a/extract-utilities/0.log" Oct 11 05:59:42 crc kubenswrapper[4651]: I1011 05:59:42.030945 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-strx6_60df6e10-9dc8-478b-a424-a86b47a1ba0a/registry-server/0.log" Oct 11 05:59:46 crc kubenswrapper[4651]: I1011 05:59:46.311026 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 05:59:46 crc kubenswrapper[4651]: I1011 05:59:46.311951 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 06:00:00 crc kubenswrapper[4651]: I1011 06:00:00.152523 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29336040-jjxxd"] Oct 11 06:00:00 crc kubenswrapper[4651]: E1011 06:00:00.154660 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cd8a09c-c03e-446e-95d2-f5fbecd8abd0" containerName="extract-utilities" Oct 11 06:00:00 crc kubenswrapper[4651]: I1011 06:00:00.154785 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cd8a09c-c03e-446e-95d2-f5fbecd8abd0" containerName="extract-utilities" Oct 11 06:00:00 crc kubenswrapper[4651]: E1011 06:00:00.154881 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cd8a09c-c03e-446e-95d2-f5fbecd8abd0" containerName="extract-content" Oct 11 06:00:00 crc kubenswrapper[4651]: I1011 06:00:00.154949 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cd8a09c-c03e-446e-95d2-f5fbecd8abd0" containerName="extract-content" Oct 11 06:00:00 crc kubenswrapper[4651]: E1011 06:00:00.155011 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cd8a09c-c03e-446e-95d2-f5fbecd8abd0" containerName="registry-server" Oct 11 06:00:00 crc kubenswrapper[4651]: I1011 06:00:00.155065 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cd8a09c-c03e-446e-95d2-f5fbecd8abd0" containerName="registry-server" Oct 11 06:00:00 crc kubenswrapper[4651]: I1011 06:00:00.155328 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cd8a09c-c03e-446e-95d2-f5fbecd8abd0" containerName="registry-server" Oct 11 06:00:00 crc kubenswrapper[4651]: I1011 06:00:00.156089 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29336040-jjxxd" Oct 11 06:00:00 crc kubenswrapper[4651]: I1011 06:00:00.158415 4651 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 11 06:00:00 crc kubenswrapper[4651]: I1011 06:00:00.158702 4651 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 11 06:00:00 crc kubenswrapper[4651]: I1011 06:00:00.163983 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29336040-jjxxd"] Oct 11 06:00:00 crc kubenswrapper[4651]: I1011 06:00:00.340330 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfvv9\" (UniqueName: \"kubernetes.io/projected/6e184848-a157-4ab0-9a4b-0ed94df37016-kube-api-access-dfvv9\") pod \"collect-profiles-29336040-jjxxd\" (UID: \"6e184848-a157-4ab0-9a4b-0ed94df37016\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29336040-jjxxd" Oct 11 06:00:00 crc kubenswrapper[4651]: I1011 06:00:00.340436 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6e184848-a157-4ab0-9a4b-0ed94df37016-secret-volume\") pod \"collect-profiles-29336040-jjxxd\" (UID: \"6e184848-a157-4ab0-9a4b-0ed94df37016\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29336040-jjxxd" Oct 11 06:00:00 crc kubenswrapper[4651]: I1011 06:00:00.340492 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6e184848-a157-4ab0-9a4b-0ed94df37016-config-volume\") pod \"collect-profiles-29336040-jjxxd\" (UID: \"6e184848-a157-4ab0-9a4b-0ed94df37016\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29336040-jjxxd" Oct 11 06:00:00 crc kubenswrapper[4651]: I1011 06:00:00.442746 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfvv9\" (UniqueName: \"kubernetes.io/projected/6e184848-a157-4ab0-9a4b-0ed94df37016-kube-api-access-dfvv9\") pod \"collect-profiles-29336040-jjxxd\" (UID: \"6e184848-a157-4ab0-9a4b-0ed94df37016\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29336040-jjxxd" Oct 11 06:00:00 crc kubenswrapper[4651]: I1011 06:00:00.443129 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6e184848-a157-4ab0-9a4b-0ed94df37016-secret-volume\") pod \"collect-profiles-29336040-jjxxd\" (UID: \"6e184848-a157-4ab0-9a4b-0ed94df37016\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29336040-jjxxd" Oct 11 06:00:00 crc kubenswrapper[4651]: I1011 06:00:00.443265 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6e184848-a157-4ab0-9a4b-0ed94df37016-config-volume\") pod \"collect-profiles-29336040-jjxxd\" (UID: \"6e184848-a157-4ab0-9a4b-0ed94df37016\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29336040-jjxxd" Oct 11 06:00:00 crc kubenswrapper[4651]: I1011 06:00:00.444112 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6e184848-a157-4ab0-9a4b-0ed94df37016-config-volume\") pod 
\"collect-profiles-29336040-jjxxd\" (UID: \"6e184848-a157-4ab0-9a4b-0ed94df37016\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29336040-jjxxd" Oct 11 06:00:00 crc kubenswrapper[4651]: I1011 06:00:00.452608 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6e184848-a157-4ab0-9a4b-0ed94df37016-secret-volume\") pod \"collect-profiles-29336040-jjxxd\" (UID: \"6e184848-a157-4ab0-9a4b-0ed94df37016\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29336040-jjxxd" Oct 11 06:00:00 crc kubenswrapper[4651]: I1011 06:00:00.465146 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfvv9\" (UniqueName: \"kubernetes.io/projected/6e184848-a157-4ab0-9a4b-0ed94df37016-kube-api-access-dfvv9\") pod \"collect-profiles-29336040-jjxxd\" (UID: \"6e184848-a157-4ab0-9a4b-0ed94df37016\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29336040-jjxxd" Oct 11 06:00:00 crc kubenswrapper[4651]: I1011 06:00:00.478118 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29336040-jjxxd" Oct 11 06:00:00 crc kubenswrapper[4651]: I1011 06:00:00.974386 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29336040-jjxxd"] Oct 11 06:00:01 crc kubenswrapper[4651]: I1011 06:00:01.400958 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29336040-jjxxd" event={"ID":"6e184848-a157-4ab0-9a4b-0ed94df37016","Type":"ContainerStarted","Data":"5f5e418d3744b3274cca483a2afb53713a7a00d113abd5b31e99a924a3a75817"} Oct 11 06:00:02 crc kubenswrapper[4651]: I1011 06:00:02.440904 4651 generic.go:334] "Generic (PLEG): container finished" podID="6e184848-a157-4ab0-9a4b-0ed94df37016" containerID="40c7ee5eaf545300f4892f9de63de9a39971f13483c1ad6dbbbcb185eac5432b" exitCode=0 Oct 11 06:00:02 crc kubenswrapper[4651]: I1011 06:00:02.441577 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29336040-jjxxd" event={"ID":"6e184848-a157-4ab0-9a4b-0ed94df37016","Type":"ContainerDied","Data":"40c7ee5eaf545300f4892f9de63de9a39971f13483c1ad6dbbbcb185eac5432b"} Oct 11 06:00:03 crc kubenswrapper[4651]: I1011 06:00:03.924879 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29336040-jjxxd" Oct 11 06:00:04 crc kubenswrapper[4651]: I1011 06:00:04.015811 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dfvv9\" (UniqueName: \"kubernetes.io/projected/6e184848-a157-4ab0-9a4b-0ed94df37016-kube-api-access-dfvv9\") pod \"6e184848-a157-4ab0-9a4b-0ed94df37016\" (UID: \"6e184848-a157-4ab0-9a4b-0ed94df37016\") " Oct 11 06:00:04 crc kubenswrapper[4651]: I1011 06:00:04.016056 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6e184848-a157-4ab0-9a4b-0ed94df37016-secret-volume\") pod \"6e184848-a157-4ab0-9a4b-0ed94df37016\" (UID: \"6e184848-a157-4ab0-9a4b-0ed94df37016\") " Oct 11 06:00:04 crc kubenswrapper[4651]: I1011 06:00:04.016105 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6e184848-a157-4ab0-9a4b-0ed94df37016-config-volume\") pod \"6e184848-a157-4ab0-9a4b-0ed94df37016\" (UID: \"6e184848-a157-4ab0-9a4b-0ed94df37016\") " Oct 11 06:00:04 crc kubenswrapper[4651]: I1011 06:00:04.019382 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e184848-a157-4ab0-9a4b-0ed94df37016-config-volume" (OuterVolumeSpecName: "config-volume") pod "6e184848-a157-4ab0-9a4b-0ed94df37016" (UID: "6e184848-a157-4ab0-9a4b-0ed94df37016"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 06:00:04 crc kubenswrapper[4651]: I1011 06:00:04.025983 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e184848-a157-4ab0-9a4b-0ed94df37016-kube-api-access-dfvv9" (OuterVolumeSpecName: "kube-api-access-dfvv9") pod "6e184848-a157-4ab0-9a4b-0ed94df37016" (UID: "6e184848-a157-4ab0-9a4b-0ed94df37016"). InnerVolumeSpecName "kube-api-access-dfvv9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 06:00:04 crc kubenswrapper[4651]: I1011 06:00:04.050959 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e184848-a157-4ab0-9a4b-0ed94df37016-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "6e184848-a157-4ab0-9a4b-0ed94df37016" (UID: "6e184848-a157-4ab0-9a4b-0ed94df37016"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 06:00:04 crc kubenswrapper[4651]: I1011 06:00:04.119592 4651 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6e184848-a157-4ab0-9a4b-0ed94df37016-config-volume\") on node \"crc\" DevicePath \"\"" Oct 11 06:00:04 crc kubenswrapper[4651]: I1011 06:00:04.119631 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dfvv9\" (UniqueName: \"kubernetes.io/projected/6e184848-a157-4ab0-9a4b-0ed94df37016-kube-api-access-dfvv9\") on node \"crc\" DevicePath \"\"" Oct 11 06:00:04 crc kubenswrapper[4651]: I1011 06:00:04.119643 4651 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6e184848-a157-4ab0-9a4b-0ed94df37016-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 11 06:00:04 crc kubenswrapper[4651]: I1011 06:00:04.463052 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29336040-jjxxd" event={"ID":"6e184848-a157-4ab0-9a4b-0ed94df37016","Type":"ContainerDied","Data":"5f5e418d3744b3274cca483a2afb53713a7a00d113abd5b31e99a924a3a75817"} Oct 11 06:00:04 crc kubenswrapper[4651]: I1011 06:00:04.463382 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5f5e418d3744b3274cca483a2afb53713a7a00d113abd5b31e99a924a3a75817" Oct 11 06:00:04 crc kubenswrapper[4651]: I1011 06:00:04.463310 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29336040-jjxxd" Oct 11 06:00:05 crc kubenswrapper[4651]: I1011 06:00:05.013994 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335995-g6dc8"] Oct 11 06:00:05 crc kubenswrapper[4651]: I1011 06:00:05.032591 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335995-g6dc8"] Oct 11 06:00:05 crc kubenswrapper[4651]: I1011 06:00:05.892322 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3aacd804-c502-494e-9d34-b2547f0abb1e" path="/var/lib/kubelet/pods/3aacd804-c502-494e-9d34-b2547f0abb1e/volumes" Oct 11 06:00:16 crc kubenswrapper[4651]: I1011 06:00:16.310789 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 06:00:16 crc kubenswrapper[4651]: I1011 06:00:16.311577 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 06:00:16 crc kubenswrapper[4651]: I1011 06:00:16.311655 4651 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" Oct 11 06:00:16 crc kubenswrapper[4651]: I1011 06:00:16.312870 4651 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"138046dd6ace80fbeb2ae1e92c9cc244c02ae0ec463960b56cf19a59f5eeaf98"} 
pod="openshift-machine-config-operator/machine-config-daemon-78jnv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 11 06:00:16 crc kubenswrapper[4651]: I1011 06:00:16.312966 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" containerID="cri-o://138046dd6ace80fbeb2ae1e92c9cc244c02ae0ec463960b56cf19a59f5eeaf98" gracePeriod=600 Oct 11 06:00:16 crc kubenswrapper[4651]: I1011 06:00:16.580582 4651 generic.go:334] "Generic (PLEG): container finished" podID="519a1ae1-e964-48b0-8b61-835146df28c1" containerID="138046dd6ace80fbeb2ae1e92c9cc244c02ae0ec463960b56cf19a59f5eeaf98" exitCode=0 Oct 11 06:00:16 crc kubenswrapper[4651]: I1011 06:00:16.580780 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" event={"ID":"519a1ae1-e964-48b0-8b61-835146df28c1","Type":"ContainerDied","Data":"138046dd6ace80fbeb2ae1e92c9cc244c02ae0ec463960b56cf19a59f5eeaf98"} Oct 11 06:00:16 crc kubenswrapper[4651]: I1011 06:00:16.581063 4651 scope.go:117] "RemoveContainer" containerID="c912b3bc11354e42d7455ad2000403f0825b71426687440ea00315847957d4d3" Oct 11 06:00:17 crc kubenswrapper[4651]: I1011 06:00:17.603429 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" event={"ID":"519a1ae1-e964-48b0-8b61-835146df28c1","Type":"ContainerStarted","Data":"af3966c669b1128c8fd627334d4ac802f35b7d05ddd8b129bbb33262a566a534"} Oct 11 06:00:36 crc kubenswrapper[4651]: I1011 06:00:36.269988 4651 scope.go:117] "RemoveContainer" containerID="147778ea63189bf10092acd2abf04ffad81c42a12bce3201443eba5303685e3c" Oct 11 06:01:00 crc kubenswrapper[4651]: I1011 06:01:00.170810 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29336041-9r8w2"] Oct 11 06:01:00 crc kubenswrapper[4651]: E1011 06:01:00.172292 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e184848-a157-4ab0-9a4b-0ed94df37016" containerName="collect-profiles" Oct 11 06:01:00 crc kubenswrapper[4651]: I1011 06:01:00.172340 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e184848-a157-4ab0-9a4b-0ed94df37016" containerName="collect-profiles" Oct 11 06:01:00 crc kubenswrapper[4651]: I1011 06:01:00.172957 4651 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e184848-a157-4ab0-9a4b-0ed94df37016" containerName="collect-profiles" Oct 11 06:01:00 crc kubenswrapper[4651]: I1011 06:01:00.174725 4651 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29336041-9r8w2" Oct 11 06:01:00 crc kubenswrapper[4651]: I1011 06:01:00.190845 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29336041-9r8w2"] Oct 11 06:01:00 crc kubenswrapper[4651]: I1011 06:01:00.364582 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec1de355-cb27-46de-bed5-db22bf8a5c70-combined-ca-bundle\") pod \"keystone-cron-29336041-9r8w2\" (UID: \"ec1de355-cb27-46de-bed5-db22bf8a5c70\") " pod="openstack/keystone-cron-29336041-9r8w2" Oct 11 06:01:00 crc kubenswrapper[4651]: I1011 06:01:00.364715 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ec1de355-cb27-46de-bed5-db22bf8a5c70-fernet-keys\") pod \"keystone-cron-29336041-9r8w2\" (UID: \"ec1de355-cb27-46de-bed5-db22bf8a5c70\") " pod="openstack/keystone-cron-29336041-9r8w2" Oct 11 06:01:00 crc kubenswrapper[4651]: I1011 06:01:00.364783 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec1de355-cb27-46de-bed5-db22bf8a5c70-config-data\") pod \"keystone-cron-29336041-9r8w2\" (UID: \"ec1de355-cb27-46de-bed5-db22bf8a5c70\") " pod="openstack/keystone-cron-29336041-9r8w2" Oct 11 06:01:00 crc kubenswrapper[4651]: I1011 06:01:00.364891 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9c8wn\" (UniqueName: \"kubernetes.io/projected/ec1de355-cb27-46de-bed5-db22bf8a5c70-kube-api-access-9c8wn\") pod \"keystone-cron-29336041-9r8w2\" (UID: \"ec1de355-cb27-46de-bed5-db22bf8a5c70\") " pod="openstack/keystone-cron-29336041-9r8w2" Oct 11 06:01:00 crc kubenswrapper[4651]: I1011 06:01:00.466907 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec1de355-cb27-46de-bed5-db22bf8a5c70-combined-ca-bundle\") pod \"keystone-cron-29336041-9r8w2\" (UID: \"ec1de355-cb27-46de-bed5-db22bf8a5c70\") " pod="openstack/keystone-cron-29336041-9r8w2" Oct 11 06:01:00 crc kubenswrapper[4651]: I1011 06:01:00.466999 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ec1de355-cb27-46de-bed5-db22bf8a5c70-fernet-keys\") pod \"keystone-cron-29336041-9r8w2\" (UID: \"ec1de355-cb27-46de-bed5-db22bf8a5c70\") " pod="openstack/keystone-cron-29336041-9r8w2" Oct 11 06:01:00 crc kubenswrapper[4651]: I1011 06:01:00.467060 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec1de355-cb27-46de-bed5-db22bf8a5c70-config-data\") pod \"keystone-cron-29336041-9r8w2\" (UID: \"ec1de355-cb27-46de-bed5-db22bf8a5c70\") " pod="openstack/keystone-cron-29336041-9r8w2" Oct 11 06:01:00 crc kubenswrapper[4651]: I1011 06:01:00.467149 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9c8wn\" (UniqueName: \"kubernetes.io/projected/ec1de355-cb27-46de-bed5-db22bf8a5c70-kube-api-access-9c8wn\") pod \"keystone-cron-29336041-9r8w2\" (UID: \"ec1de355-cb27-46de-bed5-db22bf8a5c70\") " pod="openstack/keystone-cron-29336041-9r8w2" Oct 11 06:01:00 crc kubenswrapper[4651]: I1011 06:01:00.501554 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec1de355-cb27-46de-bed5-db22bf8a5c70-combined-ca-bundle\") pod \"keystone-cron-29336041-9r8w2\" (UID: \"ec1de355-cb27-46de-bed5-db22bf8a5c70\") " pod="openstack/keystone-cron-29336041-9r8w2" Oct 11 06:01:00 crc kubenswrapper[4651]: I1011 06:01:00.501722 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec1de355-cb27-46de-bed5-db22bf8a5c70-config-data\") pod \"keystone-cron-29336041-9r8w2\" (UID: \"ec1de355-cb27-46de-bed5-db22bf8a5c70\") " pod="openstack/keystone-cron-29336041-9r8w2" Oct 11 06:01:00 crc kubenswrapper[4651]: I1011 06:01:00.503443 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ec1de355-cb27-46de-bed5-db22bf8a5c70-fernet-keys\") pod \"keystone-cron-29336041-9r8w2\" (UID: \"ec1de355-cb27-46de-bed5-db22bf8a5c70\") " pod="openstack/keystone-cron-29336041-9r8w2" Oct 11 06:01:00 crc kubenswrapper[4651]: I1011 06:01:00.503648 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9c8wn\" (UniqueName: \"kubernetes.io/projected/ec1de355-cb27-46de-bed5-db22bf8a5c70-kube-api-access-9c8wn\") pod \"keystone-cron-29336041-9r8w2\" (UID: \"ec1de355-cb27-46de-bed5-db22bf8a5c70\") " pod="openstack/keystone-cron-29336041-9r8w2" Oct 11 06:01:00 crc kubenswrapper[4651]: I1011 06:01:00.548429 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29336041-9r8w2" Oct 11 06:01:01 crc kubenswrapper[4651]: I1011 06:01:01.030514 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29336041-9r8w2"] Oct 11 06:01:01 crc kubenswrapper[4651]: I1011 06:01:01.168042 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29336041-9r8w2" event={"ID":"ec1de355-cb27-46de-bed5-db22bf8a5c70","Type":"ContainerStarted","Data":"61c041a99caaf243612460bcb636fde29085404d5d3ff9b3ac4758ae3bc3071f"} Oct 11 06:01:02 crc kubenswrapper[4651]: I1011 06:01:02.185364 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29336041-9r8w2" event={"ID":"ec1de355-cb27-46de-bed5-db22bf8a5c70","Type":"ContainerStarted","Data":"13e8b8495bd9eeaf1a93a7de23a694e00b6d56944d032126ab1a5550823426c4"} Oct 11 06:01:02 crc kubenswrapper[4651]: I1011 06:01:02.215549 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29336041-9r8w2" podStartSLOduration=2.215520278 podStartE2EDuration="2.215520278s" podCreationTimestamp="2025-10-11 06:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 06:01:02.215145968 +0000 UTC m=+4183.111378804" watchObservedRunningTime="2025-10-11 06:01:02.215520278 +0000 UTC m=+4183.111753104" Oct 11 06:01:03 crc kubenswrapper[4651]: I1011 06:01:03.195272 4651 generic.go:334] "Generic (PLEG): container finished" podID="ec1de355-cb27-46de-bed5-db22bf8a5c70" containerID="13e8b8495bd9eeaf1a93a7de23a694e00b6d56944d032126ab1a5550823426c4" exitCode=0 Oct 11 06:01:03 crc kubenswrapper[4651]: I1011 06:01:03.195437 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29336041-9r8w2" event={"ID":"ec1de355-cb27-46de-bed5-db22bf8a5c70","Type":"ContainerDied","Data":"13e8b8495bd9eeaf1a93a7de23a694e00b6d56944d032126ab1a5550823426c4"} Oct 11 06:01:04 crc kubenswrapper[4651]: 
I1011 06:01:04.668485 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29336041-9r8w2" Oct 11 06:01:04 crc kubenswrapper[4651]: I1011 06:01:04.764228 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec1de355-cb27-46de-bed5-db22bf8a5c70-combined-ca-bundle\") pod \"ec1de355-cb27-46de-bed5-db22bf8a5c70\" (UID: \"ec1de355-cb27-46de-bed5-db22bf8a5c70\") " Oct 11 06:01:04 crc kubenswrapper[4651]: I1011 06:01:04.764414 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ec1de355-cb27-46de-bed5-db22bf8a5c70-fernet-keys\") pod \"ec1de355-cb27-46de-bed5-db22bf8a5c70\" (UID: \"ec1de355-cb27-46de-bed5-db22bf8a5c70\") " Oct 11 06:01:04 crc kubenswrapper[4651]: I1011 06:01:04.764592 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec1de355-cb27-46de-bed5-db22bf8a5c70-config-data\") pod \"ec1de355-cb27-46de-bed5-db22bf8a5c70\" (UID: \"ec1de355-cb27-46de-bed5-db22bf8a5c70\") " Oct 11 06:01:04 crc kubenswrapper[4651]: I1011 06:01:04.764652 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9c8wn\" (UniqueName: \"kubernetes.io/projected/ec1de355-cb27-46de-bed5-db22bf8a5c70-kube-api-access-9c8wn\") pod \"ec1de355-cb27-46de-bed5-db22bf8a5c70\" (UID: \"ec1de355-cb27-46de-bed5-db22bf8a5c70\") " Oct 11 06:01:04 crc kubenswrapper[4651]: I1011 06:01:04.773179 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec1de355-cb27-46de-bed5-db22bf8a5c70-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "ec1de355-cb27-46de-bed5-db22bf8a5c70" (UID: "ec1de355-cb27-46de-bed5-db22bf8a5c70"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 06:01:04 crc kubenswrapper[4651]: I1011 06:01:04.790554 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec1de355-cb27-46de-bed5-db22bf8a5c70-kube-api-access-9c8wn" (OuterVolumeSpecName: "kube-api-access-9c8wn") pod "ec1de355-cb27-46de-bed5-db22bf8a5c70" (UID: "ec1de355-cb27-46de-bed5-db22bf8a5c70"). InnerVolumeSpecName "kube-api-access-9c8wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 06:01:04 crc kubenswrapper[4651]: I1011 06:01:04.796957 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec1de355-cb27-46de-bed5-db22bf8a5c70-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ec1de355-cb27-46de-bed5-db22bf8a5c70" (UID: "ec1de355-cb27-46de-bed5-db22bf8a5c70"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 06:01:04 crc kubenswrapper[4651]: I1011 06:01:04.823089 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec1de355-cb27-46de-bed5-db22bf8a5c70-config-data" (OuterVolumeSpecName: "config-data") pod "ec1de355-cb27-46de-bed5-db22bf8a5c70" (UID: "ec1de355-cb27-46de-bed5-db22bf8a5c70"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 06:01:04 crc kubenswrapper[4651]: I1011 06:01:04.868235 4651 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec1de355-cb27-46de-bed5-db22bf8a5c70-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 06:01:04 crc kubenswrapper[4651]: I1011 06:01:04.868301 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9c8wn\" (UniqueName: \"kubernetes.io/projected/ec1de355-cb27-46de-bed5-db22bf8a5c70-kube-api-access-9c8wn\") on node \"crc\" DevicePath \"\"" Oct 11 06:01:04 crc kubenswrapper[4651]: I1011 06:01:04.868377 4651 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec1de355-cb27-46de-bed5-db22bf8a5c70-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 06:01:04 crc kubenswrapper[4651]: I1011 06:01:04.868405 4651 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ec1de355-cb27-46de-bed5-db22bf8a5c70-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 11 06:01:05 crc kubenswrapper[4651]: I1011 06:01:05.224228 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29336041-9r8w2" event={"ID":"ec1de355-cb27-46de-bed5-db22bf8a5c70","Type":"ContainerDied","Data":"61c041a99caaf243612460bcb636fde29085404d5d3ff9b3ac4758ae3bc3071f"} Oct 11 06:01:05 crc kubenswrapper[4651]: I1011 06:01:05.224551 4651 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="61c041a99caaf243612460bcb636fde29085404d5d3ff9b3ac4758ae3bc3071f" Oct 11 06:01:05 crc kubenswrapper[4651]: I1011 06:01:05.224291 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29336041-9r8w2" Oct 11 06:01:18 crc kubenswrapper[4651]: I1011 06:01:18.426239 4651 generic.go:334] "Generic (PLEG): container finished" podID="5e95a87d-f2af-4b16-addc-86141bbe88ed" containerID="d1cd6cf38c4796790c1d7566c9bd7002cdabb3f83454589783359875d8e4b641" exitCode=0 Oct 11 06:01:18 crc kubenswrapper[4651]: I1011 06:01:18.426437 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gr7gt/must-gather-k45rb" event={"ID":"5e95a87d-f2af-4b16-addc-86141bbe88ed","Type":"ContainerDied","Data":"d1cd6cf38c4796790c1d7566c9bd7002cdabb3f83454589783359875d8e4b641"} Oct 11 06:01:18 crc kubenswrapper[4651]: I1011 06:01:18.428302 4651 scope.go:117] "RemoveContainer" containerID="d1cd6cf38c4796790c1d7566c9bd7002cdabb3f83454589783359875d8e4b641" Oct 11 06:01:18 crc kubenswrapper[4651]: I1011 06:01:18.554746 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-gr7gt_must-gather-k45rb_5e95a87d-f2af-4b16-addc-86141bbe88ed/gather/0.log" Oct 11 06:01:23 crc kubenswrapper[4651]: I1011 06:01:23.056404 4651 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-p6xcx"] Oct 11 06:01:23 crc kubenswrapper[4651]: E1011 06:01:23.058073 4651 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec1de355-cb27-46de-bed5-db22bf8a5c70" containerName="keystone-cron" Oct 11 06:01:23 crc kubenswrapper[4651]: I1011 06:01:23.058096 4651 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec1de355-cb27-46de-bed5-db22bf8a5c70" containerName="keystone-cron" Oct 11 06:01:23 crc kubenswrapper[4651]: I1011 06:01:23.058507 4651 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="ec1de355-cb27-46de-bed5-db22bf8a5c70" containerName="keystone-cron" Oct 11 06:01:23 crc kubenswrapper[4651]: I1011 06:01:23.061321 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-p6xcx" Oct 11 06:01:23 crc kubenswrapper[4651]: I1011 06:01:23.069214 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-p6xcx"] Oct 11 06:01:23 crc kubenswrapper[4651]: I1011 06:01:23.212543 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kc6tb\" (UniqueName: \"kubernetes.io/projected/678a4ff7-faaf-4325-81f1-064d9e066697-kube-api-access-kc6tb\") pod \"certified-operators-p6xcx\" (UID: \"678a4ff7-faaf-4325-81f1-064d9e066697\") " pod="openshift-marketplace/certified-operators-p6xcx" Oct 11 06:01:23 crc kubenswrapper[4651]: I1011 06:01:23.212842 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/678a4ff7-faaf-4325-81f1-064d9e066697-utilities\") pod \"certified-operators-p6xcx\" (UID: \"678a4ff7-faaf-4325-81f1-064d9e066697\") " pod="openshift-marketplace/certified-operators-p6xcx" Oct 11 06:01:23 crc kubenswrapper[4651]: I1011 06:01:23.212897 4651 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/678a4ff7-faaf-4325-81f1-064d9e066697-catalog-content\") pod \"certified-operators-p6xcx\" (UID: \"678a4ff7-faaf-4325-81f1-064d9e066697\") " pod="openshift-marketplace/certified-operators-p6xcx" Oct 11 06:01:23 crc kubenswrapper[4651]: I1011 06:01:23.315960 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/678a4ff7-faaf-4325-81f1-064d9e066697-utilities\") pod \"certified-operators-p6xcx\" (UID: \"678a4ff7-faaf-4325-81f1-064d9e066697\") " pod="openshift-marketplace/certified-operators-p6xcx" Oct 11 06:01:23 crc kubenswrapper[4651]: I1011 06:01:23.316024 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/678a4ff7-faaf-4325-81f1-064d9e066697-catalog-content\") pod \"certified-operators-p6xcx\" (UID: \"678a4ff7-faaf-4325-81f1-064d9e066697\") " pod="openshift-marketplace/certified-operators-p6xcx" Oct 11 06:01:23 crc kubenswrapper[4651]: I1011 06:01:23.316183 4651 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kc6tb\" (UniqueName: \"kubernetes.io/projected/678a4ff7-faaf-4325-81f1-064d9e066697-kube-api-access-kc6tb\") pod \"certified-operators-p6xcx\" (UID: \"678a4ff7-faaf-4325-81f1-064d9e066697\") " pod="openshift-marketplace/certified-operators-p6xcx" Oct 11 06:01:23 crc kubenswrapper[4651]: I1011 06:01:23.316877 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/678a4ff7-faaf-4325-81f1-064d9e066697-utilities\") pod \"certified-operators-p6xcx\" (UID: \"678a4ff7-faaf-4325-81f1-064d9e066697\") " pod="openshift-marketplace/certified-operators-p6xcx" Oct 11 06:01:23 crc kubenswrapper[4651]: I1011 06:01:23.317296 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/678a4ff7-faaf-4325-81f1-064d9e066697-catalog-content\") pod \"certified-operators-p6xcx\" (UID: 
\"678a4ff7-faaf-4325-81f1-064d9e066697\") " pod="openshift-marketplace/certified-operators-p6xcx" Oct 11 06:01:23 crc kubenswrapper[4651]: I1011 06:01:23.351000 4651 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kc6tb\" (UniqueName: \"kubernetes.io/projected/678a4ff7-faaf-4325-81f1-064d9e066697-kube-api-access-kc6tb\") pod \"certified-operators-p6xcx\" (UID: \"678a4ff7-faaf-4325-81f1-064d9e066697\") " pod="openshift-marketplace/certified-operators-p6xcx" Oct 11 06:01:23 crc kubenswrapper[4651]: I1011 06:01:23.387704 4651 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-p6xcx" Oct 11 06:01:23 crc kubenswrapper[4651]: I1011 06:01:23.851075 4651 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-p6xcx"] Oct 11 06:01:24 crc kubenswrapper[4651]: I1011 06:01:24.499473 4651 generic.go:334] "Generic (PLEG): container finished" podID="678a4ff7-faaf-4325-81f1-064d9e066697" containerID="29e53957f4648614267dd2ee0919b082e332b358c2d7fbcc3b3148eaef7336e4" exitCode=0 Oct 11 06:01:24 crc kubenswrapper[4651]: I1011 06:01:24.500011 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p6xcx" event={"ID":"678a4ff7-faaf-4325-81f1-064d9e066697","Type":"ContainerDied","Data":"29e53957f4648614267dd2ee0919b082e332b358c2d7fbcc3b3148eaef7336e4"} Oct 11 06:01:24 crc kubenswrapper[4651]: I1011 06:01:24.500389 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p6xcx" event={"ID":"678a4ff7-faaf-4325-81f1-064d9e066697","Type":"ContainerStarted","Data":"dfa40ed0ca4f98083d2996a17bbce3a11e8c15c1a6619d124bc1fbfd5ef74ee6"} Oct 11 06:01:25 crc kubenswrapper[4651]: I1011 06:01:25.513119 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p6xcx" event={"ID":"678a4ff7-faaf-4325-81f1-064d9e066697","Type":"ContainerStarted","Data":"73ea6400e8d24ddd807699b74a87bccd17157b02cb4c93818935f5987dfdbc08"} Oct 11 06:01:26 crc kubenswrapper[4651]: I1011 06:01:26.522406 4651 generic.go:334] "Generic (PLEG): container finished" podID="678a4ff7-faaf-4325-81f1-064d9e066697" containerID="73ea6400e8d24ddd807699b74a87bccd17157b02cb4c93818935f5987dfdbc08" exitCode=0 Oct 11 06:01:26 crc kubenswrapper[4651]: I1011 06:01:26.522518 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p6xcx" event={"ID":"678a4ff7-faaf-4325-81f1-064d9e066697","Type":"ContainerDied","Data":"73ea6400e8d24ddd807699b74a87bccd17157b02cb4c93818935f5987dfdbc08"} Oct 11 06:01:27 crc kubenswrapper[4651]: I1011 06:01:27.541304 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p6xcx" event={"ID":"678a4ff7-faaf-4325-81f1-064d9e066697","Type":"ContainerStarted","Data":"a9cbe99b4dc0b2477b5e304d38ff766c16f6dde84e938d3cd40947172ecc6344"} Oct 11 06:01:27 crc kubenswrapper[4651]: I1011 06:01:27.577171 4651 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-p6xcx" podStartSLOduration=2.104967846 podStartE2EDuration="4.577132906s" podCreationTimestamp="2025-10-11 06:01:23 +0000 UTC" firstStartedPulling="2025-10-11 06:01:24.504096541 +0000 UTC m=+4205.400329377" lastFinishedPulling="2025-10-11 06:01:26.976261641 +0000 UTC m=+4207.872494437" observedRunningTime="2025-10-11 06:01:27.574691334 +0000 UTC m=+4208.470924190" 
watchObservedRunningTime="2025-10-11 06:01:27.577132906 +0000 UTC m=+4208.473365712" Oct 11 06:01:28 crc kubenswrapper[4651]: I1011 06:01:28.295220 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-gr7gt/must-gather-k45rb"] Oct 11 06:01:28 crc kubenswrapper[4651]: I1011 06:01:28.295763 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-gr7gt/must-gather-k45rb" podUID="5e95a87d-f2af-4b16-addc-86141bbe88ed" containerName="copy" containerID="cri-o://6dac6a6407628b53bc50c5f46f84ab568d4488bb21284fc8df08db35b91dd4e9" gracePeriod=2 Oct 11 06:01:28 crc kubenswrapper[4651]: I1011 06:01:28.305152 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-gr7gt/must-gather-k45rb"] Oct 11 06:01:28 crc kubenswrapper[4651]: I1011 06:01:28.555880 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-gr7gt_must-gather-k45rb_5e95a87d-f2af-4b16-addc-86141bbe88ed/copy/0.log" Oct 11 06:01:28 crc kubenswrapper[4651]: I1011 06:01:28.556485 4651 generic.go:334] "Generic (PLEG): container finished" podID="5e95a87d-f2af-4b16-addc-86141bbe88ed" containerID="6dac6a6407628b53bc50c5f46f84ab568d4488bb21284fc8df08db35b91dd4e9" exitCode=143 Oct 11 06:01:29 crc kubenswrapper[4651]: I1011 06:01:29.232250 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-gr7gt_must-gather-k45rb_5e95a87d-f2af-4b16-addc-86141bbe88ed/copy/0.log" Oct 11 06:01:29 crc kubenswrapper[4651]: I1011 06:01:29.232785 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-gr7gt/must-gather-k45rb" Oct 11 06:01:29 crc kubenswrapper[4651]: I1011 06:01:29.333405 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z9tpf\" (UniqueName: \"kubernetes.io/projected/5e95a87d-f2af-4b16-addc-86141bbe88ed-kube-api-access-z9tpf\") pod \"5e95a87d-f2af-4b16-addc-86141bbe88ed\" (UID: \"5e95a87d-f2af-4b16-addc-86141bbe88ed\") " Oct 11 06:01:29 crc kubenswrapper[4651]: I1011 06:01:29.333482 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5e95a87d-f2af-4b16-addc-86141bbe88ed-must-gather-output\") pod \"5e95a87d-f2af-4b16-addc-86141bbe88ed\" (UID: \"5e95a87d-f2af-4b16-addc-86141bbe88ed\") " Oct 11 06:01:29 crc kubenswrapper[4651]: I1011 06:01:29.343486 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e95a87d-f2af-4b16-addc-86141bbe88ed-kube-api-access-z9tpf" (OuterVolumeSpecName: "kube-api-access-z9tpf") pod "5e95a87d-f2af-4b16-addc-86141bbe88ed" (UID: "5e95a87d-f2af-4b16-addc-86141bbe88ed"). InnerVolumeSpecName "kube-api-access-z9tpf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 06:01:29 crc kubenswrapper[4651]: I1011 06:01:29.435762 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z9tpf\" (UniqueName: \"kubernetes.io/projected/5e95a87d-f2af-4b16-addc-86141bbe88ed-kube-api-access-z9tpf\") on node \"crc\" DevicePath \"\"" Oct 11 06:01:29 crc kubenswrapper[4651]: I1011 06:01:29.495312 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e95a87d-f2af-4b16-addc-86141bbe88ed-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "5e95a87d-f2af-4b16-addc-86141bbe88ed" (UID: "5e95a87d-f2af-4b16-addc-86141bbe88ed"). 
InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 06:01:29 crc kubenswrapper[4651]: I1011 06:01:29.537960 4651 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/5e95a87d-f2af-4b16-addc-86141bbe88ed-must-gather-output\") on node \"crc\" DevicePath \"\"" Oct 11 06:01:29 crc kubenswrapper[4651]: I1011 06:01:29.566416 4651 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-gr7gt_must-gather-k45rb_5e95a87d-f2af-4b16-addc-86141bbe88ed/copy/0.log" Oct 11 06:01:29 crc kubenswrapper[4651]: I1011 06:01:29.567082 4651 scope.go:117] "RemoveContainer" containerID="6dac6a6407628b53bc50c5f46f84ab568d4488bb21284fc8df08db35b91dd4e9" Oct 11 06:01:29 crc kubenswrapper[4651]: I1011 06:01:29.567116 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-gr7gt/must-gather-k45rb" Oct 11 06:01:29 crc kubenswrapper[4651]: I1011 06:01:29.591930 4651 scope.go:117] "RemoveContainer" containerID="d1cd6cf38c4796790c1d7566c9bd7002cdabb3f83454589783359875d8e4b641" Oct 11 06:01:29 crc kubenswrapper[4651]: I1011 06:01:29.889414 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e95a87d-f2af-4b16-addc-86141bbe88ed" path="/var/lib/kubelet/pods/5e95a87d-f2af-4b16-addc-86141bbe88ed/volumes" Oct 11 06:01:33 crc kubenswrapper[4651]: I1011 06:01:33.388094 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-p6xcx" Oct 11 06:01:33 crc kubenswrapper[4651]: I1011 06:01:33.388757 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-p6xcx" Oct 11 06:01:33 crc kubenswrapper[4651]: I1011 06:01:33.473842 4651 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-p6xcx" Oct 11 06:01:33 crc kubenswrapper[4651]: I1011 06:01:33.691512 4651 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-p6xcx" Oct 11 06:01:33 crc kubenswrapper[4651]: I1011 06:01:33.752312 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-p6xcx"] Oct 11 06:01:35 crc kubenswrapper[4651]: I1011 06:01:35.628722 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-p6xcx" podUID="678a4ff7-faaf-4325-81f1-064d9e066697" containerName="registry-server" containerID="cri-o://a9cbe99b4dc0b2477b5e304d38ff766c16f6dde84e938d3cd40947172ecc6344" gracePeriod=2 Oct 11 06:01:36 crc kubenswrapper[4651]: I1011 06:01:36.633358 4651 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-p6xcx" Oct 11 06:01:36 crc kubenswrapper[4651]: I1011 06:01:36.643860 4651 generic.go:334] "Generic (PLEG): container finished" podID="678a4ff7-faaf-4325-81f1-064d9e066697" containerID="a9cbe99b4dc0b2477b5e304d38ff766c16f6dde84e938d3cd40947172ecc6344" exitCode=0 Oct 11 06:01:36 crc kubenswrapper[4651]: I1011 06:01:36.643877 4651 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-p6xcx" Oct 11 06:01:36 crc kubenswrapper[4651]: I1011 06:01:36.643908 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p6xcx" event={"ID":"678a4ff7-faaf-4325-81f1-064d9e066697","Type":"ContainerDied","Data":"a9cbe99b4dc0b2477b5e304d38ff766c16f6dde84e938d3cd40947172ecc6344"} Oct 11 06:01:36 crc kubenswrapper[4651]: I1011 06:01:36.643975 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p6xcx" event={"ID":"678a4ff7-faaf-4325-81f1-064d9e066697","Type":"ContainerDied","Data":"dfa40ed0ca4f98083d2996a17bbce3a11e8c15c1a6619d124bc1fbfd5ef74ee6"} Oct 11 06:01:36 crc kubenswrapper[4651]: I1011 06:01:36.644003 4651 scope.go:117] "RemoveContainer" containerID="a9cbe99b4dc0b2477b5e304d38ff766c16f6dde84e938d3cd40947172ecc6344" Oct 11 06:01:36 crc kubenswrapper[4651]: I1011 06:01:36.680888 4651 scope.go:117] "RemoveContainer" containerID="73ea6400e8d24ddd807699b74a87bccd17157b02cb4c93818935f5987dfdbc08" Oct 11 06:01:36 crc kubenswrapper[4651]: I1011 06:01:36.718376 4651 scope.go:117] "RemoveContainer" containerID="29e53957f4648614267dd2ee0919b082e332b358c2d7fbcc3b3148eaef7336e4" Oct 11 06:01:36 crc kubenswrapper[4651]: I1011 06:01:36.758463 4651 scope.go:117] "RemoveContainer" containerID="a9cbe99b4dc0b2477b5e304d38ff766c16f6dde84e938d3cd40947172ecc6344" Oct 11 06:01:36 crc kubenswrapper[4651]: E1011 06:01:36.759000 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a9cbe99b4dc0b2477b5e304d38ff766c16f6dde84e938d3cd40947172ecc6344\": container with ID starting with a9cbe99b4dc0b2477b5e304d38ff766c16f6dde84e938d3cd40947172ecc6344 not found: ID does not exist" containerID="a9cbe99b4dc0b2477b5e304d38ff766c16f6dde84e938d3cd40947172ecc6344" Oct 11 06:01:36 crc kubenswrapper[4651]: I1011 06:01:36.759053 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9cbe99b4dc0b2477b5e304d38ff766c16f6dde84e938d3cd40947172ecc6344"} err="failed to get container status \"a9cbe99b4dc0b2477b5e304d38ff766c16f6dde84e938d3cd40947172ecc6344\": rpc error: code = NotFound desc = could not find container \"a9cbe99b4dc0b2477b5e304d38ff766c16f6dde84e938d3cd40947172ecc6344\": container with ID starting with a9cbe99b4dc0b2477b5e304d38ff766c16f6dde84e938d3cd40947172ecc6344 not found: ID does not exist" Oct 11 06:01:36 crc kubenswrapper[4651]: I1011 06:01:36.759090 4651 scope.go:117] "RemoveContainer" containerID="73ea6400e8d24ddd807699b74a87bccd17157b02cb4c93818935f5987dfdbc08" Oct 11 06:01:36 crc kubenswrapper[4651]: E1011 06:01:36.759569 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"73ea6400e8d24ddd807699b74a87bccd17157b02cb4c93818935f5987dfdbc08\": container with ID starting with 73ea6400e8d24ddd807699b74a87bccd17157b02cb4c93818935f5987dfdbc08 not found: ID does not exist" containerID="73ea6400e8d24ddd807699b74a87bccd17157b02cb4c93818935f5987dfdbc08" Oct 11 06:01:36 crc kubenswrapper[4651]: I1011 06:01:36.759619 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"73ea6400e8d24ddd807699b74a87bccd17157b02cb4c93818935f5987dfdbc08"} err="failed to get container status \"73ea6400e8d24ddd807699b74a87bccd17157b02cb4c93818935f5987dfdbc08\": rpc error: code = NotFound desc = could not find container 
\"73ea6400e8d24ddd807699b74a87bccd17157b02cb4c93818935f5987dfdbc08\": container with ID starting with 73ea6400e8d24ddd807699b74a87bccd17157b02cb4c93818935f5987dfdbc08 not found: ID does not exist" Oct 11 06:01:36 crc kubenswrapper[4651]: I1011 06:01:36.759654 4651 scope.go:117] "RemoveContainer" containerID="29e53957f4648614267dd2ee0919b082e332b358c2d7fbcc3b3148eaef7336e4" Oct 11 06:01:36 crc kubenswrapper[4651]: E1011 06:01:36.760162 4651 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"29e53957f4648614267dd2ee0919b082e332b358c2d7fbcc3b3148eaef7336e4\": container with ID starting with 29e53957f4648614267dd2ee0919b082e332b358c2d7fbcc3b3148eaef7336e4 not found: ID does not exist" containerID="29e53957f4648614267dd2ee0919b082e332b358c2d7fbcc3b3148eaef7336e4" Oct 11 06:01:36 crc kubenswrapper[4651]: I1011 06:01:36.760197 4651 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29e53957f4648614267dd2ee0919b082e332b358c2d7fbcc3b3148eaef7336e4"} err="failed to get container status \"29e53957f4648614267dd2ee0919b082e332b358c2d7fbcc3b3148eaef7336e4\": rpc error: code = NotFound desc = could not find container \"29e53957f4648614267dd2ee0919b082e332b358c2d7fbcc3b3148eaef7336e4\": container with ID starting with 29e53957f4648614267dd2ee0919b082e332b358c2d7fbcc3b3148eaef7336e4 not found: ID does not exist" Oct 11 06:01:36 crc kubenswrapper[4651]: I1011 06:01:36.822252 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/678a4ff7-faaf-4325-81f1-064d9e066697-utilities\") pod \"678a4ff7-faaf-4325-81f1-064d9e066697\" (UID: \"678a4ff7-faaf-4325-81f1-064d9e066697\") " Oct 11 06:01:36 crc kubenswrapper[4651]: I1011 06:01:36.822833 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kc6tb\" (UniqueName: \"kubernetes.io/projected/678a4ff7-faaf-4325-81f1-064d9e066697-kube-api-access-kc6tb\") pod \"678a4ff7-faaf-4325-81f1-064d9e066697\" (UID: \"678a4ff7-faaf-4325-81f1-064d9e066697\") " Oct 11 06:01:36 crc kubenswrapper[4651]: I1011 06:01:36.822948 4651 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/678a4ff7-faaf-4325-81f1-064d9e066697-catalog-content\") pod \"678a4ff7-faaf-4325-81f1-064d9e066697\" (UID: \"678a4ff7-faaf-4325-81f1-064d9e066697\") " Oct 11 06:01:36 crc kubenswrapper[4651]: I1011 06:01:36.826179 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/678a4ff7-faaf-4325-81f1-064d9e066697-utilities" (OuterVolumeSpecName: "utilities") pod "678a4ff7-faaf-4325-81f1-064d9e066697" (UID: "678a4ff7-faaf-4325-81f1-064d9e066697"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 06:01:36 crc kubenswrapper[4651]: I1011 06:01:36.832632 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/678a4ff7-faaf-4325-81f1-064d9e066697-kube-api-access-kc6tb" (OuterVolumeSpecName: "kube-api-access-kc6tb") pod "678a4ff7-faaf-4325-81f1-064d9e066697" (UID: "678a4ff7-faaf-4325-81f1-064d9e066697"). InnerVolumeSpecName "kube-api-access-kc6tb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 06:01:36 crc kubenswrapper[4651]: I1011 06:01:36.887744 4651 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/678a4ff7-faaf-4325-81f1-064d9e066697-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "678a4ff7-faaf-4325-81f1-064d9e066697" (UID: "678a4ff7-faaf-4325-81f1-064d9e066697"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 06:01:36 crc kubenswrapper[4651]: I1011 06:01:36.925463 4651 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/678a4ff7-faaf-4325-81f1-064d9e066697-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 06:01:36 crc kubenswrapper[4651]: I1011 06:01:36.925516 4651 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kc6tb\" (UniqueName: \"kubernetes.io/projected/678a4ff7-faaf-4325-81f1-064d9e066697-kube-api-access-kc6tb\") on node \"crc\" DevicePath \"\"" Oct 11 06:01:36 crc kubenswrapper[4651]: I1011 06:01:36.925552 4651 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/678a4ff7-faaf-4325-81f1-064d9e066697-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 06:01:36 crc kubenswrapper[4651]: I1011 06:01:36.985945 4651 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-p6xcx"] Oct 11 06:01:36 crc kubenswrapper[4651]: I1011 06:01:36.994351 4651 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-p6xcx"] Oct 11 06:01:37 crc kubenswrapper[4651]: I1011 06:01:37.884168 4651 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="678a4ff7-faaf-4325-81f1-064d9e066697" path="/var/lib/kubelet/pods/678a4ff7-faaf-4325-81f1-064d9e066697/volumes" Oct 11 06:02:16 crc kubenswrapper[4651]: I1011 06:02:16.310516 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 06:02:16 crc kubenswrapper[4651]: I1011 06:02:16.313620 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 06:02:46 crc kubenswrapper[4651]: I1011 06:02:46.310729 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 06:02:46 crc kubenswrapper[4651]: I1011 06:02:46.311883 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 06:03:16 crc kubenswrapper[4651]: I1011 06:03:16.310731 4651 patch_prober.go:28] interesting pod/machine-config-daemon-78jnv 
container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 06:03:16 crc kubenswrapper[4651]: I1011 06:03:16.311236 4651 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 06:03:16 crc kubenswrapper[4651]: I1011 06:03:16.311290 4651 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" Oct 11 06:03:16 crc kubenswrapper[4651]: I1011 06:03:16.312292 4651 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"af3966c669b1128c8fd627334d4ac802f35b7d05ddd8b129bbb33262a566a534"} pod="openshift-machine-config-operator/machine-config-daemon-78jnv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 11 06:03:16 crc kubenswrapper[4651]: I1011 06:03:16.312391 4651 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" containerName="machine-config-daemon" containerID="cri-o://af3966c669b1128c8fd627334d4ac802f35b7d05ddd8b129bbb33262a566a534" gracePeriod=600 Oct 11 06:03:16 crc kubenswrapper[4651]: E1011 06:03:16.440663 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 06:03:16 crc kubenswrapper[4651]: I1011 06:03:16.868418 4651 generic.go:334] "Generic (PLEG): container finished" podID="519a1ae1-e964-48b0-8b61-835146df28c1" containerID="af3966c669b1128c8fd627334d4ac802f35b7d05ddd8b129bbb33262a566a534" exitCode=0 Oct 11 06:03:16 crc kubenswrapper[4651]: I1011 06:03:16.868555 4651 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" event={"ID":"519a1ae1-e964-48b0-8b61-835146df28c1","Type":"ContainerDied","Data":"af3966c669b1128c8fd627334d4ac802f35b7d05ddd8b129bbb33262a566a534"} Oct 11 06:03:16 crc kubenswrapper[4651]: I1011 06:03:16.868633 4651 scope.go:117] "RemoveContainer" containerID="138046dd6ace80fbeb2ae1e92c9cc244c02ae0ec463960b56cf19a59f5eeaf98" Oct 11 06:03:16 crc kubenswrapper[4651]: I1011 06:03:16.871177 4651 scope.go:117] "RemoveContainer" containerID="af3966c669b1128c8fd627334d4ac802f35b7d05ddd8b129bbb33262a566a534" Oct 11 06:03:16 crc kubenswrapper[4651]: E1011 06:03:16.872325 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" 
podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 06:03:28 crc kubenswrapper[4651]: I1011 06:03:28.870165 4651 scope.go:117] "RemoveContainer" containerID="af3966c669b1128c8fd627334d4ac802f35b7d05ddd8b129bbb33262a566a534" Oct 11 06:03:28 crc kubenswrapper[4651]: E1011 06:03:28.871436 4651 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-78jnv_openshift-machine-config-operator(519a1ae1-e964-48b0-8b61-835146df28c1)\"" pod="openshift-machine-config-operator/machine-config-daemon-78jnv" podUID="519a1ae1-e964-48b0-8b61-835146df28c1" Oct 11 06:03:36 crc kubenswrapper[4651]: I1011 06:03:36.475244 4651 scope.go:117] "RemoveContainer" containerID="c14c5bde63f8c30df4bcfab229a9c60dd355db5633f674b5e9480e14858b8942" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515072371501024446 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015072371502017364 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015072360611016506 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015072360612015457 5ustar corecore